{
  "best_global_step": 400,
  "best_metric": 0.9147540983606557,
  "best_model_checkpoint": "./modelka_bebro/checkpoint-400",
  "epoch": 20.0,
  "eval_steps": 20,
  "global_step": 860,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.023255813953488372, "grad_norm": 0.4809555113315582, "learning_rate": 0.0, "loss": 0.6913, "step": 1 },
    { "epoch": 0.046511627906976744, "grad_norm": 0.4111059904098511, "learning_rate": 5.813953488372093e-07, "loss": 0.6908, "step": 2 },
    { "epoch": 0.06976744186046512, "grad_norm": 0.4677865207195282, "learning_rate": 1.1627906976744186e-06, "loss": 0.6945, "step": 3 },
    { "epoch": 0.09302325581395349, "grad_norm": 0.42459365725517273, "learning_rate": 1.744186046511628e-06, "loss": 0.6974, "step": 4 },
    { "epoch": 0.11627906976744186, "grad_norm": 0.5455716252326965, "learning_rate": 2.325581395348837e-06, "loss": 0.6876, "step": 5 },
    { "epoch": 0.13953488372093023, "grad_norm": 0.3835294842720032, "learning_rate": 2.9069767441860468e-06, "loss": 0.6882, "step": 6 },
    { "epoch": 0.16279069767441862, "grad_norm": 0.6538524627685547, "learning_rate": 3.488372093023256e-06, "loss": 0.6888, "step": 7 },
    { "epoch": 0.18604651162790697, "grad_norm": 0.40157362818717957, "learning_rate": 4.0697674418604655e-06, "loss": 0.6855, "step": 8 },
    { "epoch": 0.20930232558139536, "grad_norm": 0.43001478910446167, "learning_rate": 4.651162790697674e-06, "loss": 0.6918, "step": 9 },
    { "epoch": 0.23255813953488372, "grad_norm": 0.5613086819648743, "learning_rate": 5.232558139534884e-06, "loss": 0.6977, "step": 10 },
    { "epoch": 0.2558139534883721, "grad_norm": 0.3894985020160675, "learning_rate": 5.8139534883720935e-06, "loss": 0.6847, "step": 11 },
    { "epoch": 0.27906976744186046, "grad_norm": 0.4243035912513733, "learning_rate": 6.395348837209303e-06, "loss": 0.6934, "step": 12 },
    { "epoch": 0.3023255813953488, "grad_norm": 0.5996053218841553, "learning_rate": 6.976744186046512e-06, "loss": 0.6966, "step": 13 },
    { "epoch": 0.32558139534883723, "grad_norm": 0.5445254445075989, "learning_rate": 7.558139534883721e-06, "loss": 0.695, "step": 14 },
    { "epoch": 0.3488372093023256, "grad_norm": 0.433915913105011, "learning_rate": 8.139534883720931e-06, "loss": 0.6855, "step": 15 },
    { "epoch": 0.37209302325581395, "grad_norm": 0.43263381719589233, "learning_rate": 8.72093023255814e-06, "loss": 0.6901, "step": 16 },
    { "epoch": 0.3953488372093023, "grad_norm": 0.5314894318580627, "learning_rate": 9.302325581395349e-06, "loss": 0.6966, "step": 17 },
    { "epoch": 0.4186046511627907, "grad_norm": 0.3562123477458954, "learning_rate": 9.883720930232558e-06, "loss": 0.6877, "step": 18 },
    { "epoch": 0.4418604651162791, "grad_norm": 0.4751898944377899, "learning_rate": 1.0465116279069768e-05, "loss": 0.6834, "step": 19 },
    { "epoch": 0.46511627906976744, "grad_norm": 0.4273492395877838, "learning_rate": 1.1046511627906977e-05, "loss": 0.6885, "step": 20 },
    { "epoch": 0.46511627906976744, "eval_accuracy": 0.5901639344262295, "eval_f1": 0.5314093976229397, "eval_loss": 0.68656325340271, "eval_precision": 0.6364057433997221, "eval_recall": 0.5761178599121219, "eval_runtime": 0.0751, "eval_samples_per_second": 4062.686, "eval_steps_per_second": 66.601, "step": 20 },
    { "epoch": 0.4883720930232558, "grad_norm": 0.5205795764923096, "learning_rate": 1.1627906976744187e-05, "loss": 0.6908, "step": 21 },
    { "epoch": 0.5116279069767442, "grad_norm": 0.4466778635978699, "learning_rate": 1.2209302325581395e-05, "loss": 0.6861, "step": 22 },
    { "epoch": 0.5348837209302325, "grad_norm": 0.44298067688941956, "learning_rate": 1.2790697674418606e-05, "loss": 0.6832, "step": 23 },
    { "epoch": 0.5581395348837209, "grad_norm": 0.39592233300209045, "learning_rate": 1.3372093023255814e-05, "loss": 0.6877, "step": 24 },
    { "epoch": 0.5813953488372093, "grad_norm": 0.4810555875301361, "learning_rate": 1.3953488372093024e-05, "loss": 0.68, "step": 25 },
    { "epoch": 0.6046511627906976, "grad_norm": 0.47530311346054077, "learning_rate": 1.4534883720930233e-05, "loss": 0.6837, "step": 26 },
    { "epoch": 0.627906976744186, "grad_norm": 0.5110817551612854, "learning_rate": 1.5116279069767441e-05, "loss": 0.6908, "step": 27 },
    { "epoch": 0.6511627906976745, "grad_norm": 0.44798141717910767, "learning_rate": 1.569767441860465e-05, "loss": 0.6756, "step": 28 },
    { "epoch": 0.6744186046511628, "grad_norm": 0.4494743347167969, "learning_rate": 1.6279069767441862e-05, "loss": 0.6813, "step": 29 },
    { "epoch": 0.6976744186046512, "grad_norm": 0.4449148178100586, "learning_rate": 1.686046511627907e-05, "loss": 0.6786, "step": 30 },
    { "epoch": 0.7209302325581395, "grad_norm": 0.49257639050483704, "learning_rate": 1.744186046511628e-05, "loss": 0.6821, "step": 31 },
    { "epoch": 0.7441860465116279, "grad_norm": 0.49307772517204285, "learning_rate": 1.802325581395349e-05, "loss": 0.6856, "step": 32 },
    { "epoch": 0.7674418604651163, "grad_norm": 0.4002581536769867, "learning_rate": 1.8604651162790697e-05, "loss": 0.6804, "step": 33 },
    { "epoch": 0.7906976744186046, "grad_norm": 0.44210293889045715, "learning_rate": 1.918604651162791e-05, "loss": 0.6739, "step": 34 },
    { "epoch": 0.813953488372093, "grad_norm": 0.3651827871799469, "learning_rate": 1.9767441860465116e-05, "loss": 0.6839, "step": 35 },
    { "epoch": 0.8372093023255814, "grad_norm": 0.5120950937271118, "learning_rate": 2.0348837209302328e-05, "loss": 0.6746, "step": 36 },
    { "epoch": 0.8604651162790697, "grad_norm": 0.4111502766609192, "learning_rate": 2.0930232558139536e-05, "loss": 0.6805, "step": 37 },
    { "epoch": 0.8837209302325582, "grad_norm": 0.44530022144317627, "learning_rate": 2.1511627906976744e-05, "loss": 0.6833, "step": 38 },
    { "epoch": 0.9069767441860465, "grad_norm": 0.4006333649158478, "learning_rate": 2.2093023255813955e-05, "loss": 0.6769, "step": 39 },
    { "epoch": 0.9302325581395349, "grad_norm": 0.4231868386268616, "learning_rate": 2.2674418604651163e-05, "loss": 0.6721, "step": 40 },
    { "epoch": 0.9302325581395349, "eval_accuracy": 0.659016393442623, "eval_f1": 0.649208174820844, "eval_loss": 0.6683934926986694, "eval_precision": 0.6947368421052631, "eval_recall": 0.6673559059188421, "eval_runtime": 0.0782, "eval_samples_per_second": 3899.573, "eval_steps_per_second": 63.927, "step": 40 },
    { "epoch": 0.9534883720930233, "grad_norm": 0.4570966064929962, "learning_rate": 2.3255813953488374e-05, "loss": 0.679, "step": 41 },
    { "epoch": 0.9767441860465116, "grad_norm": 0.6141464710235596, "learning_rate": 2.3837209302325582e-05, "loss": 0.6768, "step": 42 },
    { "epoch": 1.0, "grad_norm": 0.6506465673446655, "learning_rate": 2.441860465116279e-05, "loss": 0.6573, "step": 43 },
    { "epoch": 1.0232558139534884, "grad_norm": 1.0706639289855957, "learning_rate": 2.5e-05, "loss": 0.6865, "step": 44 },
    { "epoch": 1.0465116279069768, "grad_norm": 0.4623822867870331, "learning_rate": 2.5581395348837212e-05, "loss": 0.6538, "step": 45 },
    { "epoch": 1.069767441860465, "grad_norm": 0.4986269176006317, "learning_rate": 2.616279069767442e-05, "loss": 0.6585, "step": 46 },
    { "epoch": 1.0930232558139534, "grad_norm": 0.48053082823753357, "learning_rate": 2.674418604651163e-05, "loss": 0.6506, "step": 47 },
    { "epoch": 1.1162790697674418, "grad_norm": 0.5987160801887512, "learning_rate": 2.7325581395348836e-05, "loss": 0.6691, "step": 48 },
    { "epoch": 1.1395348837209303, "grad_norm": 0.685001015663147, "learning_rate": 2.7906976744186048e-05, "loss": 0.6512, "step": 49 },
    { "epoch": 1.1627906976744187, "grad_norm": 0.5840403437614441, "learning_rate": 2.848837209302326e-05, "loss": 0.6759, "step": 50 },
    { "epoch": 1.1860465116279069, "grad_norm": 0.6035524606704712, "learning_rate": 2.9069767441860467e-05, "loss": 0.6502, "step": 51 },
    { "epoch": 1.2093023255813953, "grad_norm": 0.6236196160316467, "learning_rate": 2.9651162790697678e-05, "loss": 0.6575, "step": 52 },
    { "epoch": 1.2325581395348837, "grad_norm": 0.5840880274772644, "learning_rate": 3.0232558139534883e-05, "loss": 0.6438, "step": 53 },
    { "epoch": 1.255813953488372, "grad_norm": 0.7563631534576416, "learning_rate": 3.081395348837209e-05, "loss": 0.645, "step": 54 },
    { "epoch": 1.2790697674418605, "grad_norm": 0.8720300793647766, "learning_rate": 3.13953488372093e-05, "loss": 0.6551, "step": 55 },
    { "epoch": 1.302325581395349, "grad_norm": 0.5763499140739441, "learning_rate": 3.197674418604651e-05, "loss": 0.6304, "step": 56 },
    { "epoch": 1.3255813953488373, "grad_norm": 0.5487861037254333, "learning_rate": 3.2558139534883724e-05, "loss": 0.6373, "step": 57 },
    { "epoch": 1.3488372093023255, "grad_norm": 0.5714741945266724, "learning_rate": 3.313953488372093e-05, "loss": 0.638, "step": 58 },
    { "epoch": 1.372093023255814, "grad_norm": 0.7604700326919556, "learning_rate": 3.372093023255814e-05, "loss": 0.6537, "step": 59 },
    { "epoch": 1.3953488372093024, "grad_norm": 0.8128901720046997, "learning_rate": 3.430232558139535e-05, "loss": 0.6573, "step": 60 },
    { "epoch": 1.3953488372093024, "eval_accuracy": 0.760655737704918, "eval_f1": 0.7589037239168805, "eval_loss": 0.6135660409927368, "eval_precision": 0.7778153153153153, "eval_recall": 0.7656801929869905, "eval_runtime": 0.0732, "eval_samples_per_second": 4169.193, "eval_steps_per_second": 68.347, "step": 60 },
    { "epoch": 1.4186046511627908, "grad_norm": 0.6136202216148376, "learning_rate": 3.488372093023256e-05, "loss": 0.6257, "step": 61 },
    { "epoch": 1.441860465116279, "grad_norm": 0.6842197179794312, "learning_rate": 3.5465116279069774e-05, "loss": 0.6065, "step": 62 },
    { "epoch": 1.4651162790697674, "grad_norm": 0.6682329177856445, "learning_rate": 3.604651162790698e-05, "loss": 0.6114, "step": 63 },
    { "epoch": 1.4883720930232558, "grad_norm": 0.7913668155670166, "learning_rate": 3.662790697674418e-05, "loss": 0.636, "step": 64 },
    { "epoch": 1.5116279069767442, "grad_norm": 0.7657461762428284, "learning_rate": 3.7209302325581394e-05, "loss": 0.6067, "step": 65 },
    { "epoch": 1.5348837209302326, "grad_norm": 0.9346985816955566, "learning_rate": 3.7790697674418606e-05, "loss": 0.5902, "step": 66 },
    { "epoch": 1.558139534883721, "grad_norm": 0.7872809767723083, "learning_rate": 3.837209302325582e-05, "loss": 0.5731, "step": 67 },
    { "epoch": 1.5813953488372094, "grad_norm": 0.8864495754241943, "learning_rate": 3.895348837209303e-05, "loss": 0.581, "step": 68 },
    { "epoch": 1.6046511627906976, "grad_norm": 0.8121451139450073, "learning_rate": 3.953488372093023e-05, "loss": 0.5734, "step": 69 },
    { "epoch": 1.627906976744186, "grad_norm": 1.0192168951034546, "learning_rate": 4.0116279069767444e-05, "loss": 0.5645, "step": 70 },
    { "epoch": 1.6511627906976745, "grad_norm": 0.9971368312835693, "learning_rate": 4.0697674418604655e-05, "loss": 0.5948, "step": 71 },
    { "epoch": 1.6744186046511627, "grad_norm": 0.9530318975448608, "learning_rate": 4.127906976744187e-05, "loss": 0.5129, "step": 72 },
    { "epoch": 1.697674418604651, "grad_norm": 0.980096161365509, "learning_rate": 4.186046511627907e-05, "loss": 0.5767, "step": 73 },
    { "epoch": 1.7209302325581395, "grad_norm": 0.991642951965332, "learning_rate": 4.2441860465116276e-05, "loss": 0.5403, "step": 74 },
    { "epoch": 1.744186046511628, "grad_norm": 0.8262429237365723, "learning_rate": 4.302325581395349e-05, "loss": 0.5467, "step": 75 },
    { "epoch": 1.7674418604651163, "grad_norm": 1.1981412172317505, "learning_rate": 4.36046511627907e-05, "loss": 0.5081, "step": 76 },
    { "epoch": 1.7906976744186047, "grad_norm": 0.9658230543136597, "learning_rate": 4.418604651162791e-05, "loss": 0.5647, "step": 77 },
    { "epoch": 1.8139534883720931, "grad_norm": 1.1805201768875122, "learning_rate": 4.476744186046512e-05, "loss": 0.4536, "step": 78 },
    { "epoch": 1.8372093023255816, "grad_norm": 1.101920485496521, "learning_rate": 4.5348837209302326e-05, "loss": 0.5361, "step": 79 },
    { "epoch": 1.8604651162790697, "grad_norm": 1.0350321531295776, "learning_rate": 4.593023255813954e-05, "loss": 0.5207, "step": 80 },
    { "epoch": 1.8604651162790697, "eval_accuracy": 0.8032786885245902, "eval_f1": 0.8030224748127099, "eval_loss": 0.4702507555484772, "eval_precision": 0.8029135823721811, "eval_recall": 0.8032006547772896, "eval_runtime": 0.0743, "eval_samples_per_second": 4102.555, "eval_steps_per_second": 67.255, "step": 80 },
    { "epoch": 1.8837209302325582, "grad_norm": 1.0638865232467651, "learning_rate": 4.651162790697675e-05, "loss": 0.5351, "step": 81 },
    { "epoch": 1.9069767441860463, "grad_norm": 1.7043132781982422, "learning_rate": 4.709302325581396e-05, "loss": 0.4186, "step": 82 },
    { "epoch": 1.9302325581395348, "grad_norm": 1.0048333406448364, "learning_rate": 4.7674418604651164e-05, "loss": 0.5134, "step": 83 },
    { "epoch": 1.9534883720930232, "grad_norm": 1.5068817138671875, "learning_rate": 4.8255813953488375e-05, "loss": 0.4723, "step": 84 },
    { "epoch": 1.9767441860465116, "grad_norm": 1.1574243307113647, "learning_rate": 4.883720930232558e-05, "loss": 0.4904, "step": 85 },
    { "epoch": 2.0, "grad_norm": 1.2925702333450317, "learning_rate": 4.941860465116279e-05, "loss": 0.5023, "step": 86 },
    { "epoch": 2.0232558139534884, "grad_norm": 1.679551124572754, "learning_rate": 5e-05, "loss": 0.4305, "step": 87 },
    { "epoch": 2.046511627906977, "grad_norm": 1.1237919330596924, "learning_rate": 5.0581395348837214e-05, "loss": 0.475, "step": 88 },
    { "epoch": 2.0697674418604652, "grad_norm": 1.6137198209762573, "learning_rate": 5.1162790697674425e-05, "loss": 0.4115, "step": 89 },
    { "epoch": 2.0930232558139537, "grad_norm": 1.814559817314148, "learning_rate": 5.1744186046511636e-05, "loss": 0.4894, "step": 90 },
    { "epoch": 2.116279069767442, "grad_norm": 2.1933960914611816, "learning_rate": 5.232558139534884e-05, "loss": 0.3742, "step": 91 },
    { "epoch": 2.13953488372093, "grad_norm": 1.3443443775177002, "learning_rate": 5.290697674418605e-05, "loss": 0.396, "step": 92 },
    { "epoch": 2.1627906976744184, "grad_norm": 1.5833871364593506, "learning_rate": 5.348837209302326e-05, "loss": 0.3518, "step": 93 },
    { "epoch": 2.186046511627907, "grad_norm": 1.5842630863189697, "learning_rate": 5.406976744186046e-05, "loss": 0.4174, "step": 94 },
    { "epoch": 2.2093023255813953, "grad_norm": 1.2995549440383911, "learning_rate": 5.465116279069767e-05, "loss": 0.3873, "step": 95 },
    { "epoch": 2.2325581395348837, "grad_norm": 2.144974946975708, "learning_rate": 5.5232558139534884e-05, "loss": 0.4805, "step": 96 },
    { "epoch": 2.255813953488372, "grad_norm": 1.4312434196472168, "learning_rate": 5.5813953488372095e-05, "loss": 0.4575, "step": 97 },
    { "epoch": 2.2790697674418605, "grad_norm": 2.25107741355896, "learning_rate": 5.6395348837209306e-05, "loss": 0.4139, "step": 98 },
    { "epoch": 2.302325581395349, "grad_norm": 1.6673591136932373, "learning_rate": 5.697674418604652e-05, "loss": 0.4151, "step": 99 },
    { "epoch": 2.3255813953488373, "grad_norm": 1.3179768323898315, "learning_rate": 5.755813953488373e-05, "loss": 0.2997, "step": 100 },
    { "epoch": 2.3255813953488373, "eval_accuracy": 0.839344262295082, "eval_f1": 0.839344262295082, "eval_loss": 0.3802424669265747, "eval_precision": 0.8408718876540019, "eval_recall": 0.8408718876540019, "eval_runtime": 0.0744, "eval_samples_per_second": 4101.555, "eval_steps_per_second": 67.239, "step": 100 },
    { "epoch": 2.3488372093023258, "grad_norm": 2.245741844177246, "learning_rate": 5.8139534883720933e-05, "loss": 0.3442, "step": 101 },
    { "epoch": 2.3720930232558137, "grad_norm": 2.9141011238098145, "learning_rate": 5.8720930232558145e-05, "loss": 0.3489, "step": 102 },
    { "epoch": 2.395348837209302, "grad_norm": 1.829503059387207, "learning_rate": 5.9302325581395356e-05, "loss": 0.3544, "step": 103 },
    { "epoch": 2.4186046511627906, "grad_norm": 2.233344793319702, "learning_rate": 5.9883720930232554e-05, "loss": 0.3008, "step": 104 },
    { "epoch": 2.441860465116279, "grad_norm": 2.4595718383789062, "learning_rate": 6.0465116279069765e-05, "loss": 0.3853, "step": 105 },
    { "epoch": 2.4651162790697674, "grad_norm": 1.539181113243103, "learning_rate": 6.104651162790698e-05, "loss": 0.3165, "step": 106 },
    { "epoch": 2.488372093023256, "grad_norm": 1.7545955181121826, "learning_rate": 6.162790697674418e-05, "loss": 0.3907, "step": 107 },
    { "epoch": 2.511627906976744, "grad_norm": 3.285968065261841, "learning_rate": 6.22093023255814e-05, "loss": 0.4645, "step": 108 },
    { "epoch": 2.5348837209302326, "grad_norm": 4.625302314758301, "learning_rate": 6.27906976744186e-05, "loss": 0.2975, "step": 109 },
    { "epoch": 2.558139534883721, "grad_norm": 1.9037487506866455, "learning_rate": 6.337209302325582e-05, "loss": 0.3629, "step": 110 },
    { "epoch": 2.5813953488372094, "grad_norm": 2.768249988555908, "learning_rate": 6.395348837209303e-05, "loss": 0.385, "step": 111 },
    { "epoch": 2.604651162790698, "grad_norm": 1.8411540985107422, "learning_rate": 6.453488372093024e-05, "loss": 0.2562, "step": 112 },
    { "epoch": 2.6279069767441863, "grad_norm": 2.3537325859069824, "learning_rate": 6.511627906976745e-05, "loss": 0.2486, "step": 113 },
    { "epoch": 2.6511627906976747, "grad_norm": 4.455109596252441, "learning_rate": 6.569767441860465e-05, "loss": 0.2424, "step": 114 },
    { "epoch": 2.6744186046511627, "grad_norm": 3.032252550125122, "learning_rate": 6.627906976744186e-05, "loss": 0.3554, "step": 115 },
    { "epoch": 2.697674418604651, "grad_norm": 2.142354965209961, "learning_rate": 6.686046511627908e-05, "loss": 0.3701, "step": 116 },
    { "epoch": 2.7209302325581395, "grad_norm": 3.0141141414642334, "learning_rate": 6.744186046511628e-05, "loss": 0.4602, "step": 117 },
    { "epoch": 2.744186046511628, "grad_norm": 2.129042625427246, "learning_rate": 6.802325581395348e-05, "loss": 0.4295, "step": 118 },
    { "epoch": 2.7674418604651163, "grad_norm": 4.583343029022217, "learning_rate": 6.86046511627907e-05, "loss": 0.3599, "step": 119 },
    { "epoch": 2.7906976744186047, "grad_norm": 2.799339532852173, "learning_rate": 6.918604651162791e-05, "loss": 0.4383, "step": 120 },
    { "epoch": 2.7906976744186047, "eval_accuracy": 0.8426229508196721, "eval_f1": 0.8426077234024254, "eval_loss": 0.30933016538619995, "eval_precision": 0.8432928927895371, "eval_recall": 0.8437365382958559, "eval_runtime": 0.0741, "eval_samples_per_second": 4116.681, "eval_steps_per_second": 67.487, "step": 120 },
    { "epoch": 2.813953488372093, "grad_norm": 3.329214096069336, "learning_rate": 6.976744186046513e-05, "loss": 0.2955, "step": 121 },
    { "epoch": 2.8372093023255816, "grad_norm": 1.7641863822937012, "learning_rate": 7.034883720930233e-05, "loss": 0.372, "step": 122 },
    { "epoch": 2.8604651162790695, "grad_norm": 2.8948066234588623, "learning_rate": 7.093023255813955e-05, "loss": 0.3738, "step": 123 },
    { "epoch": 2.883720930232558, "grad_norm": 3.2698655128479004, "learning_rate": 7.151162790697675e-05, "loss": 0.4183, "step": 124 },
    { "epoch": 2.9069767441860463, "grad_norm": 2.3984827995300293, "learning_rate": 7.209302325581396e-05, "loss": 0.3729, "step": 125 },
    { "epoch": 2.9302325581395348, "grad_norm": 2.4366798400878906, "learning_rate": 7.267441860465116e-05, "loss": 0.291, "step": 126 },
    { "epoch": 2.953488372093023, "grad_norm": 1.6613179445266724, "learning_rate": 7.325581395348837e-05, "loss": 0.2673, "step": 127 },
    { "epoch": 2.9767441860465116, "grad_norm": 3.9009366035461426, "learning_rate": 7.383720930232558e-05, "loss": 0.3912, "step": 128 },
    { "epoch": 3.0, "grad_norm": 6.0683417320251465, "learning_rate": 7.441860465116279e-05, "loss": 0.3879, "step": 129 },
    { "epoch": 3.0232558139534884, "grad_norm": 2.161290407180786, "learning_rate": 7.500000000000001e-05, "loss": 0.2878, "step": 130 },
    { "epoch": 3.046511627906977, "grad_norm": 5.449894905090332, "learning_rate": 7.558139534883721e-05, "loss": 0.3326, "step": 131 },
    { "epoch": 3.0697674418604652, "grad_norm": 2.6764752864837646, "learning_rate": 7.616279069767443e-05, "loss": 0.2813, "step": 132 },
    { "epoch": 3.0930232558139537, "grad_norm": 1.1198005676269531, "learning_rate": 7.674418604651163e-05, "loss": 0.2651, "step": 133 },
    { "epoch": 3.116279069767442, "grad_norm": 1.7498178482055664, "learning_rate": 7.732558139534884e-05, "loss": 0.2171, "step": 134 },
    { "epoch": 3.13953488372093, "grad_norm": 4.505956649780273, "learning_rate": 7.790697674418606e-05, "loss": 0.2529, "step": 135 },
    { "epoch": 3.1627906976744184, "grad_norm": 2.509193181991577, "learning_rate": 7.848837209302326e-05, "loss": 0.2102, "step": 136 },
    { "epoch": 3.186046511627907, "grad_norm": 1.7064465284347534, "learning_rate": 7.906976744186047e-05, "loss": 0.2097, "step": 137 },
    { "epoch": 3.2093023255813953, "grad_norm": 1.7659178972244263, "learning_rate": 7.965116279069767e-05, "loss": 0.1806, "step": 138 },
    { "epoch": 3.2325581395348837, "grad_norm": 3.508608818054199, "learning_rate": 8.023255813953489e-05, "loss": 0.1419, "step": 139 },
    { "epoch": 3.255813953488372, "grad_norm": 2.2445175647735596, "learning_rate": 8.081395348837209e-05, "loss": 0.4201, "step": 140 },
    { "epoch": 3.255813953488372, "eval_accuracy": 0.8622950819672132, "eval_f1": 0.8622225090345896, "eval_loss": 0.31382298469543457, "eval_precision": 0.8622291021671826, "eval_recall": 0.8628844662703541, "eval_runtime": 0.0729, "eval_samples_per_second": 4184.713, "eval_steps_per_second": 68.602, "step": 140 },
    { "epoch": 3.2790697674418605, "grad_norm": 2.019263744354248, "learning_rate": 8.139534883720931e-05, "loss": 0.2023, "step": 141 },
    { "epoch": 3.302325581395349, "grad_norm": 1.6362420320510864, "learning_rate": 8.197674418604652e-05, "loss": 0.1729, "step": 142 },
    { "epoch": 3.3255813953488373, "grad_norm": 2.1134684085845947, "learning_rate": 8.255813953488373e-05, "loss": 0.2688, "step": 143 },
    { "epoch": 3.3488372093023258, "grad_norm": 3.522068738937378, "learning_rate": 8.313953488372094e-05, "loss": 0.199, "step": 144 },
    { "epoch": 3.3720930232558137, "grad_norm": 4.994085788726807, "learning_rate": 8.372093023255814e-05, "loss": 0.2641, "step": 145 },
    { "epoch": 3.395348837209302, "grad_norm": 2.1998469829559326, "learning_rate": 8.430232558139536e-05, "loss": 0.2183, "step": 146 },
    { "epoch": 3.4186046511627906, "grad_norm": 1.9816820621490479, "learning_rate": 8.488372093023255e-05, "loss": 0.2952, "step": 147 },
    { "epoch": 3.441860465116279, "grad_norm": 2.8008265495300293, "learning_rate": 8.546511627906977e-05, "loss": 0.2562, "step": 148 },
    { "epoch": 3.4651162790697674, "grad_norm": 2.082418918609619, "learning_rate": 8.604651162790697e-05, "loss": 0.2626, "step": 149 },
    { "epoch": 3.488372093023256, "grad_norm": 3.3041298389434814, "learning_rate": 8.662790697674419e-05, "loss": 0.2373, "step": 150 },
    { "epoch": 3.511627906976744, "grad_norm": 2.62347674369812, "learning_rate": 8.72093023255814e-05, "loss": 0.2749, "step": 151 },
    { "epoch": 3.5348837209302326, "grad_norm": 3.0671257972717285, "learning_rate": 8.779069767441861e-05, "loss": 0.3538, "step": 152 },
    { "epoch": 3.558139534883721, "grad_norm": 2.737697124481201, "learning_rate": 8.837209302325582e-05, "loss": 0.3537, "step": 153 },
    { "epoch": 3.5813953488372094, "grad_norm": 6.468879699707031, "learning_rate": 8.895348837209302e-05, "loss": 0.5041, "step": 154 },
    { "epoch": 3.604651162790698, "grad_norm": 3.8154025077819824, "learning_rate": 8.953488372093024e-05, "loss": 0.3206, "step": 155 },
    { "epoch": 3.6279069767441863, "grad_norm": 1.7028948068618774, "learning_rate": 9.011627906976745e-05, "loss": 0.3093, "step": 156 },
    { "epoch": 3.6511627906976747, "grad_norm": 2.867501735687256, "learning_rate": 9.069767441860465e-05, "loss": 0.2656, "step": 157 },
    { "epoch": 3.6744186046511627, "grad_norm": 4.052389621734619, "learning_rate": 9.127906976744186e-05, "loss": 0.4386, "step": 158 },
    { "epoch": 3.697674418604651, "grad_norm": 1.909553050994873, "learning_rate": 9.186046511627907e-05, "loss": 0.2907, "step": 159 },
    { "epoch": 3.7209302325581395, "grad_norm": 1.4470328092575073, "learning_rate": 9.244186046511628e-05, "loss": 0.1697, "step": 160 },
    { "epoch": 3.7209302325581395, "eval_accuracy": 0.8721311475409836, "eval_f1": 0.8721256490469893, "eval_loss": 0.26616957783699036, "eval_precision": 0.8730848683077983, "eval_recall": 0.87343844231929, "eval_runtime": 0.0715, "eval_samples_per_second": 4265.076, "eval_steps_per_second": 69.919, "step": 160 },
    { "epoch": 3.744186046511628, "grad_norm": 1.7241488695144653, "learning_rate": 9.30232558139535e-05, "loss": 0.248, "step": 161 },
    { "epoch": 3.7674418604651163, "grad_norm": 1.370429277420044, "learning_rate": 9.36046511627907e-05, "loss": 0.26, "step": 162 },
    { "epoch": 3.7906976744186047, "grad_norm": 2.8360958099365234, "learning_rate": 9.418604651162792e-05, "loss": 0.3161, "step": 163 },
    { "epoch": 3.813953488372093, "grad_norm": 2.840177297592163, "learning_rate": 9.476744186046512e-05, "loss": 0.3643, "step": 164 },
    { "epoch": 3.8372093023255816, "grad_norm": 1.9618759155273438, "learning_rate": 9.534883720930233e-05, "loss": 0.3486, "step": 165 },
    { "epoch": 3.8604651162790695, "grad_norm": 3.201247215270996, "learning_rate": 9.593023255813955e-05, "loss": 0.293, "step": 166 },
    { "epoch": 3.883720930232558, "grad_norm": 3.3960511684417725, "learning_rate": 9.651162790697675e-05, "loss": 0.2797, "step": 167 },
    { "epoch": 3.9069767441860463, "grad_norm": 2.5416486263275146, "learning_rate": 9.709302325581396e-05, "loss": 0.2781, "step": 168 },
    { "epoch": 3.9302325581395348, "grad_norm": 4.113210678100586, "learning_rate": 9.767441860465116e-05, "loss": 0.2734, "step": 169 },
    { "epoch": 3.953488372093023, "grad_norm": 3.773771286010742, "learning_rate": 9.825581395348838e-05, "loss": 0.2775, "step": 170 },
    { "epoch": 3.9767441860465116, "grad_norm": 1.3982651233673096, "learning_rate": 9.883720930232558e-05, "loss": 0.1802, "step": 171 },
    { "epoch": 4.0, "grad_norm": 1.2582266330718994, "learning_rate": 9.94186046511628e-05, "loss": 0.1667, "step": 172 },
    { "epoch": 4.023255813953488, "grad_norm": 6.577229976654053, "learning_rate": 0.0001, "loss": 0.4147, "step": 173 },
    { "epoch": 4.046511627906977, "grad_norm": 4.923567295074463, "learning_rate": 9.98546511627907e-05, "loss": 0.1983, "step": 174 },
    { "epoch": 4.069767441860465, "grad_norm": 1.9299342632293701, "learning_rate": 9.97093023255814e-05, "loss": 0.2169, "step": 175 },
    { "epoch": 4.093023255813954, "grad_norm": 3.2299985885620117, "learning_rate": 9.95639534883721e-05, "loss": 0.1698, "step": 176 },
    { "epoch": 4.116279069767442, "grad_norm": 2.034163236618042, "learning_rate": 9.94186046511628e-05, "loss": 0.191, "step": 177 },
    { "epoch": 4.1395348837209305, "grad_norm": 4.542312145233154, "learning_rate": 9.927325581395349e-05, "loss": 0.2465, "step": 178 },
    { "epoch": 4.162790697674419, "grad_norm": 1.8868296146392822, "learning_rate": 9.912790697674418e-05, "loss": 0.2437, "step": 179 },
    { "epoch": 4.186046511627907, "grad_norm": 4.527658939361572, "learning_rate": 9.898255813953488e-05, "loss": 0.2774, "step": 180 },
    { "epoch": 4.186046511627907, "eval_accuracy": 0.8852459016393442, "eval_f1": 0.8839686525146466, "eval_loss": 0.3168491721153259, "eval_precision": 0.8926612403786605, "eval_recall": 0.8823770138709399, "eval_runtime": 0.0728, "eval_samples_per_second": 4190.498, "eval_steps_per_second": 68.697, "step": 180 },
    { "epoch": 4.209302325581396, "grad_norm": 6.06718635559082, "learning_rate": 9.883720930232558e-05, "loss": 0.267, "step": 181 },
    { "epoch": 4.232558139534884, "grad_norm": 8.161118507385254, "learning_rate": 9.869186046511628e-05, "loss": 0.459, "step": 182 },
    { "epoch": 4.2558139534883725, "grad_norm": 3.9242475032806396, "learning_rate": 9.854651162790698e-05, "loss": 0.2698, "step": 183 },
    { "epoch": 4.27906976744186, "grad_norm": 6.657914161682129, "learning_rate": 9.840116279069768e-05, "loss": 0.2114, "step": 184 },
    { "epoch": 4.3023255813953485, "grad_norm": 2.23410964012146, "learning_rate": 9.825581395348838e-05, "loss": 0.1337, "step": 185 },
    { "epoch": 4.325581395348837, "grad_norm": 1.6781232357025146, "learning_rate": 9.811046511627908e-05, "loss": 0.1269, "step": 186 },
    { "epoch": 4.348837209302325, "grad_norm": 1.3690869808197021, "learning_rate": 9.796511627906976e-05, "loss": 0.1783, "step": 187 },
    { "epoch": 4.372093023255814, "grad_norm": 2.0360560417175293, "learning_rate": 9.781976744186046e-05, "loss": 0.281, "step": 188 },
    { "epoch": 4.395348837209302, "grad_norm": 3.783336877822876, "learning_rate": 9.767441860465116e-05, "loss": 0.3072, "step": 189 },
    { "epoch": 4.4186046511627906, "grad_norm": 3.0387606620788574, "learning_rate": 9.752906976744186e-05, "loss": 0.2682, "step": 190 },
    { "epoch": 4.441860465116279, "grad_norm": 1.6739147901535034, "learning_rate": 9.738372093023256e-05, "loss": 0.1597, "step": 191 },
    { "epoch": 4.465116279069767, "grad_norm": 4.560539245605469, "learning_rate": 9.723837209302326e-05, "loss": 0.2328, "step": 192 },
    { "epoch": 4.488372093023256, "grad_norm": 2.564955711364746, "learning_rate": 9.709302325581396e-05, "loss": 0.2731, "step": 193 },
    { "epoch": 4.511627906976744, "grad_norm": 6.892247676849365, "learning_rate": 9.694767441860465e-05, "loss": 0.2358, "step": 194 },
    { "epoch": 4.534883720930233, "grad_norm": 2.4123950004577637, "learning_rate": 9.680232558139535e-05, "loss": 0.2267, "step": 195 },
    { "epoch": 4.558139534883721, "grad_norm": 2.6707470417022705, "learning_rate": 9.665697674418605e-05, "loss": 0.1415, "step": 196 },
    { "epoch": 4.5813953488372094, "grad_norm": 1.8982809782028198, "learning_rate": 9.651162790697675e-05, "loss": 0.2119, "step": 197 },
    { "epoch": 4.604651162790698, "grad_norm": 3.8330225944519043, "learning_rate": 9.636627906976745e-05, "loss": 0.152, "step": 198 },
    { "epoch": 4.627906976744186, "grad_norm": 2.504080057144165, "learning_rate": 9.622093023255815e-05, "loss": 0.1623, "step": 199 },
    { "epoch": 4.651162790697675, "grad_norm": 1.6243199110031128, "learning_rate": 9.607558139534885e-05, "loss": 0.1537, "step": 200 },
    { "epoch": 4.651162790697675, "eval_accuracy": 0.8950819672131147, "eval_f1": 0.8942532720811303, "eval_loss": 0.2364664524793625, "eval_precision": 0.8994351024697845, "eval_recall": 0.892930989919876, "eval_runtime": 0.0733, "eval_samples_per_second": 4158.906, "eval_steps_per_second": 68.179, "step": 200 },
    { "epoch": 4.674418604651163, "grad_norm": 3.362497091293335, "learning_rate": 9.593023255813955e-05, "loss": 0.2097, "step": 201 },
    { "epoch": 4.6976744186046515, "grad_norm": 1.36155366897583, "learning_rate": 9.578488372093024e-05, "loss": 0.134, "step": 202 },
    { "epoch": 4.720930232558139, "grad_norm": 2.335468053817749, "learning_rate": 9.563953488372094e-05, "loss": 0.2128, "step": 203 },
    { "epoch": 4.7441860465116275, "grad_norm": 2.431711196899414, "learning_rate": 9.549418604651163e-05, "loss": 0.2257, "step": 204 },
    { "epoch": 4.767441860465116, "grad_norm": 2.425615072250366, "learning_rate": 9.534883720930233e-05, "loss": 0.1691, "step": 205 },
    { "epoch": 4.790697674418604, "grad_norm": 3.3857626914978027, "learning_rate": 9.520348837209303e-05, "loss": 0.2282, "step": 206 },
    { "epoch": 4.813953488372093, "grad_norm": 2.252249240875244, "learning_rate": 9.505813953488373e-05, "loss": 0.2333, "step": 207 },
    { "epoch": 4.837209302325581, "grad_norm": 4.851521015167236, "learning_rate": 9.491279069767442e-05, "loss": 0.2621, "step": 208 },
    { "epoch": 4.8604651162790695, "grad_norm": 1.249811053276062, "learning_rate": 9.476744186046512e-05, "loss": 0.075, "step": 209 },
    { "epoch": 4.883720930232558, "grad_norm": 1.6059314012527466, "learning_rate": 9.462209302325582e-05, "loss": 0.1878, "step": 210 },
    { "epoch": 4.906976744186046, "grad_norm": 7.576606750488281, "learning_rate": 9.447674418604652e-05, "loss": 0.2258, "step": 211 },
    { "epoch": 4.930232558139535, "grad_norm": 2.4628143310546875, "learning_rate": 9.433139534883722e-05, "loss": 0.1463, "step": 212 },
    { "epoch": 4.953488372093023, "grad_norm": 4.781926155090332, "learning_rate": 9.418604651162792e-05, "loss": 0.2608, "step": 213 },
    { "epoch": 4.976744186046512, "grad_norm": 3.2314810752868652, "learning_rate": 9.40406976744186e-05, "loss": 0.2344, "step": 214 },
    { "epoch": 5.0, "grad_norm": 3.6826560497283936, "learning_rate": 9.38953488372093e-05, "loss": 0.17, "step": 215 },
    { "epoch": 5.023255813953488, "grad_norm": 1.804560661315918, "learning_rate": 9.375e-05, "loss": 0.1241, "step": 216 },
    { "epoch": 5.046511627906977, "grad_norm": 3.6199300289154053, "learning_rate": 9.36046511627907e-05, "loss": 0.1335, "step": 217 },
    { "epoch": 5.069767441860465, "grad_norm": 2.37376070022583, "learning_rate": 9.34593023255814e-05, "loss": 0.1883, "step": 218 },
    { "epoch": 5.093023255813954, "grad_norm": 4.643337726593018, "learning_rate": 9.33139534883721e-05, "loss": 0.187, "step": 219 },
    { "epoch": 5.116279069767442, "grad_norm": 6.206487655639648, "learning_rate": 9.31686046511628e-05, "loss": 0.166, "step": 220 },
    { "epoch": 5.116279069767442, "eval_accuracy": 0.8819672131147541, "eval_f1": 0.8815073815073815, "eval_loss": 0.27603965997695923, "eval_precision": 0.8827922077922078, "eval_recall": 0.8809123804600671, "eval_runtime": 0.0725, "eval_samples_per_second": 4205.887, "eval_steps_per_second": 68.949, "step": 220 },
    { "epoch": 5.1395348837209305, "grad_norm": 2.4301323890686035, "learning_rate": 9.30232558139535e-05, "loss": 0.0946, "step": 221 },
    { "epoch": 5.162790697674419, "grad_norm": 4.870151519775391, "learning_rate": 9.28779069767442e-05, "loss": 0.2674, "step": 222 },
    { "epoch": 5.186046511627907, "grad_norm": 5.952517032623291, "learning_rate": 9.273255813953488e-05, "loss": 0.2605, "step": 223 },
    { "epoch": 5.209302325581396, "grad_norm": 1.8462685346603394, "learning_rate": 9.258720930232558e-05, "loss": 0.1902, "step": 224 },
    { "epoch": 5.232558139534884, "grad_norm": 2.608715295791626, "learning_rate": 9.244186046511628e-05, "loss": 0.1392, "step": 225 },
    { "epoch": 5.2558139534883725, "grad_norm": 4.296781539916992, "learning_rate": 9.229651162790698e-05, "loss": 0.2599, "step": 226 },
    { "epoch": 5.27906976744186, "grad_norm": 6.4339165687561035, "learning_rate": 9.215116279069768e-05, "loss": 0.162, "step": 227 },
    { "epoch": 5.3023255813953485, "grad_norm": 4.432989597320557, "learning_rate": 9.200581395348837e-05, "loss": 0.1832, "step": 228 },
    { "epoch": 5.325581395348837, "grad_norm": 1.9655786752700806, "learning_rate": 9.186046511627907e-05, "loss": 0.1192, "step": 229 },
    { "epoch": 5.348837209302325, "grad_norm": 4.3318891525268555, "learning_rate": 9.171511627906977e-05, "loss": 0.244, "step": 230 },
    { "epoch": 5.372093023255814, "grad_norm": 2.993924856185913, "learning_rate": 9.156976744186047e-05, "loss": 0.1828, "step": 231 },
    { "epoch": 5.395348837209302, "grad_norm": 2.020063638687134, "learning_rate": 9.142441860465116e-05, "loss": 0.1642, "step": 232 },
    { "epoch": 5.4186046511627906, "grad_norm": 2.384181499481201, "learning_rate": 9.127906976744186e-05, "loss": 0.1752, "step": 233 },
    { "epoch": 5.441860465116279, "grad_norm": 1.9194653034210205, "learning_rate": 9.113372093023255e-05, "loss": 0.0996, "step": 234 },
    { "epoch": 5.465116279069767, "grad_norm": 3.680722713470459, "learning_rate": 9.098837209302325e-05, "loss": 0.1925, "step": 235 },
    { "epoch": 5.488372093023256, "grad_norm": 3.332195520401001, "learning_rate": 9.084302325581395e-05, "loss": 0.13, "step": 236 },
    { "epoch": 5.511627906976744, "grad_norm": 2.6394619941711426, "learning_rate": 9.069767441860465e-05, "loss": 0.193, "step": 237 },
    { "epoch": 5.534883720930233, "grad_norm": 4.5959038734436035, "learning_rate": 9.055232558139536e-05, "loss": 0.2303, "step": 238 },
    { "epoch": 5.558139534883721, "grad_norm": 6.544390678405762, "learning_rate": 9.040697674418606e-05, "loss": 0.2432, "step": 239 },
    { "epoch": 5.5813953488372094, "grad_norm": 1.6620129346847534, "learning_rate": 9.026162790697675e-05, "loss": 0.1378, "step": 240 },
    { "epoch": 5.5813953488372094, "eval_accuracy": 0.8819672131147541, "eval_f1": 0.8816810344827587, "eval_loss": 0.21341156959533691, "eval_precision": 0.8819660110420979, "eval_recall": 0.8814723873524597, "eval_runtime": 0.091, "eval_samples_per_second": 3353.025, "eval_steps_per_second": 54.968, "step": 240 },
    { "epoch": 5.604651162790698, "grad_norm": 1.251617431640625, "learning_rate": 9.011627906976745e-05, "loss": 0.1108, "step": 241 },
    { "epoch": 5.627906976744186, "grad_norm": 2.331637144088745, "learning_rate": 8.997093023255815e-05, "loss": 0.1591, "step": 242 },
    { "epoch": 5.651162790697675, "grad_norm": 2.126330852508545, "learning_rate": 8.982558139534884e-05, "loss": 0.1115, "step": 243 },
    { "epoch": 5.674418604651163, "grad_norm": 2.0312886238098145, "learning_rate": 8.968023255813954e-05, "loss": 0.1009, "step": 244 },
    { "epoch": 5.6976744186046515, "grad_norm": 3.0067551136016846, "learning_rate": 8.953488372093024e-05, "loss": 0.1851, "step": 245 },
    { "epoch": 5.720930232558139, "grad_norm": 3.1301369667053223, "learning_rate": 8.938953488372094e-05, "loss": 0.107, "step": 246 },
    { "epoch": 5.7441860465116275, "grad_norm": 6.043979167938232, "learning_rate": 8.924418604651164e-05, "loss": 0.273, "step": 247 },
    { "epoch": 5.767441860465116, "grad_norm": 3.7104833126068115, "learning_rate": 8.909883720930234e-05, "loss": 0.1299, "step": 248 },
    { "epoch": 5.790697674418604, "grad_norm": 2.3856964111328125, "learning_rate": 8.895348837209302e-05, "loss": 0.1252, "step": 249 },
    { "epoch": 5.813953488372093, "grad_norm": 5.702451705932617, "learning_rate": 8.880813953488372e-05, "loss": 0.1939, "step": 250 },
    { "epoch": 5.837209302325581, "grad_norm": 3.5763566493988037, "learning_rate": 8.866279069767442e-05, "loss": 0.2426, "step": 251 },
    { "epoch": 5.8604651162790695, "grad_norm": 4.04267692565918, "learning_rate": 8.851744186046512e-05, "loss": 0.1966, "step": 252 },
    { "epoch": 5.883720930232558, "grad_norm": 2.391784191131592, "learning_rate": 8.837209302325582e-05, "loss": 0.2284, "step": 253 },
    { "epoch": 5.906976744186046, "grad_norm": 4.925626754760742, "learning_rate": 8.822674418604652e-05, "loss": 0.1559, "step": 254 },
    { "epoch": 5.930232558139535, "grad_norm": 5.659231185913086, "learning_rate": 8.808139534883722e-05, "loss": 0.1527, "step": 255 },
    { "epoch": 5.953488372093023, "grad_norm": 1.3519715070724487, "learning_rate": 8.793604651162792e-05, "loss": 0.097, "step": 256 },
    { "epoch": 5.976744186046512, "grad_norm": 3.517763137817383, "learning_rate": 8.779069767441861e-05, "loss": 0.2737, "step": 257 },
    { "epoch": 6.0, "grad_norm": 3.0338218212127686, "learning_rate": 8.76453488372093e-05, "loss": 0.1154, "step": 258 },
    { "epoch": 6.023255813953488, "grad_norm": 2.716304063796997, "learning_rate": 8.75e-05, "loss": 0.2143, "step": 259 },
    { "epoch": 6.046511627906977, "grad_norm": 1.8077433109283447, "learning_rate": 8.73546511627907e-05, "loss": 0.1411, "step": 260 },
    { "epoch": 6.046511627906977, "eval_accuracy": 0.8950819672131147, "eval_f1": 0.8945822172297591, "eval_loss": 0.23519852757453918, "eval_precision": 0.8966414996094767, "eval_recall": 0.8937710002584647, "eval_runtime": 0.0746, "eval_samples_per_second": 4089.283, "eval_steps_per_second": 67.037, "step": 260 },
    { "epoch": 6.069767441860465, "grad_norm": 4.399131774902344, "learning_rate": 8.72093023255814e-05, "loss": 0.1678, "step": 261 },
    { "epoch": 6.093023255813954, "grad_norm": 1.8267767429351807, "learning_rate": 8.70639534883721e-05, "loss": 0.0858, "step": 262 },
    { "epoch": 6.116279069767442, "grad_norm": 4.386148929595947, "learning_rate": 8.69186046511628e-05, "loss": 0.1666, "step": 263 },
    { "epoch": 6.1395348837209305, "grad_norm": 4.314615249633789, "learning_rate": 8.67732558139535e-05, "loss": 0.1782, "step": 264 },
    { "epoch": 6.162790697674419, "grad_norm": 1.9564558267593384, "learning_rate": 8.662790697674419e-05, "loss": 0.203, "step": 265 },
    { "epoch": 6.186046511627907, "grad_norm": 3.557976484298706, "learning_rate": 8.648255813953489e-05, "loss": 0.1347, "step": 266 },
    { "epoch": 6.209302325581396, "grad_norm": 2.464057207107544, "learning_rate": 8.633720930232559e-05, "loss": 0.0906, "step": 267 },
    { "epoch": 6.232558139534884, "grad_norm": 2.893970251083374, "learning_rate": 8.619186046511628e-05, "loss": 0.0893, "step": 268 },
    { "epoch": 6.2558139534883725, "grad_norm": 4.821974277496338, "learning_rate": 8.604651162790697e-05, "loss": 0.1008, "step": 269 },
    { "epoch": 6.27906976744186, "grad_norm": 2.767670154571533, "learning_rate": 8.590116279069767e-05, "loss": 0.1072, "step": 270 },
    { "epoch": 6.3023255813953485, "grad_norm": 2.337888240814209, "learning_rate": 8.575581395348837e-05, "loss": 0.1691, "step": 271 },
    { "epoch": 6.325581395348837, "grad_norm": 2.4074652194976807, "learning_rate": 8.561046511627907e-05, "loss": 0.1671, "step": 272 },
    { "epoch": 6.348837209302325, "grad_norm": 6.366968631744385, "learning_rate": 8.546511627906977e-05, "loss": 0.1741, "step": 273 },
    { "epoch": 6.372093023255814, "grad_norm": 2.4231204986572266, "learning_rate": 8.531976744186047e-05, "loss": 0.1563, "step": 274 },
    { "epoch": 6.395348837209302, "grad_norm": 3.424386739730835, "learning_rate": 8.517441860465117e-05, "loss": 0.2211, "step": 275 },
    { "epoch": 6.4186046511627906, "grad_norm": 6.0630717277526855, "learning_rate": 8.502906976744187e-05, "loss": 0.3506, "step": 276 },
    { "epoch": 6.441860465116279, "grad_norm": 2.5241973400115967, "learning_rate": 8.488372093023255e-05, "loss": 0.1239, "step": 277 },
    { "epoch": 6.465116279069767, "grad_norm": 1.1857047080993652, "learning_rate": 8.473837209302325e-05, "loss": 0.1218, "step": 278 },
    { "epoch": 6.488372093023256, "grad_norm": 2.826883316040039, "learning_rate": 8.459302325581395e-05, "loss": 0.1296, "step": 279 },
    { "epoch": 6.511627906976744, "grad_norm": 3.720126152038574, "learning_rate": 8.444767441860465e-05, "loss": 0.1594, "step": 280 },
    { "epoch": 6.511627906976744, "eval_accuracy": 0.8819672131147541, "eval_f1": 0.8815073815073815, "eval_loss": 0.25715914368629456, "eval_precision": 0.8827922077922078, "eval_recall": 0.8809123804600671, "eval_runtime": 0.0733, "eval_samples_per_second": 4160.082, "eval_steps_per_second": 68.198, "step": 280 },
    { "epoch": 6.534883720930233, "grad_norm": 2.7126853466033936, "learning_rate": 8.430232558139536e-05, "loss": 0.1234, "step": 281 },
    { "epoch": 6.558139534883721, "grad_norm": 2.641782522201538, "learning_rate": 8.415697674418606e-05, "loss": 0.1515, "step": 282 },
    { "epoch": 6.5813953488372094, "grad_norm": 5.715346813201904, "learning_rate": 8.401162790697676e-05, "loss": 0.1463, "step": 283 },
    { "epoch": 6.604651162790698, "grad_norm": 4.03624153137207, "learning_rate": 8.386627906976746e-05, "loss": 0.1425, "step": 284 },
    { "epoch": 6.627906976744186, "grad_norm": 0.8614564538002014, "learning_rate": 8.372093023255814e-05, "loss": 0.1115, "step": 285 },
    { "epoch": 6.651162790697675, "grad_norm": 3.0801596641540527, "learning_rate": 8.357558139534884e-05, "loss": 0.1954, "step": 286 },
    { "epoch": 6.674418604651163, "grad_norm": 2.3671586513519287, "learning_rate": 8.343023255813954e-05, "loss": 0.1223, "step": 287 },
    { "epoch": 6.6976744186046515, "grad_norm": 4.468645095825195, "learning_rate": 8.328488372093024e-05, "loss": 0.1422, "step": 288 },
    { "epoch": 6.720930232558139, "grad_norm": 2.540923833847046, "learning_rate": 8.313953488372094e-05, "loss": 0.127, "step": 289 },
    { "epoch": 6.7441860465116275, "grad_norm": 1.3170732259750366, "learning_rate": 8.299418604651164e-05, "loss": 0.0479, "step": 290 },
    { "epoch": 6.767441860465116, "grad_norm": 3.5731353759765625, "learning_rate": 8.284883720930234e-05, "loss": 0.1118, "step": 291 },
    { "epoch": 6.790697674418604, "grad_norm": 6.928558826446533, "learning_rate": 8.270348837209303e-05, "loss": 0.2011, "step": 292 },
    { "epoch": 6.813953488372093, "grad_norm": 4.524282932281494, "learning_rate": 8.255813953488373e-05, "loss": 0.1252, "step": 293 },
    { "epoch": 6.837209302325581, "grad_norm": 7.167698383331299, "learning_rate": 8.241279069767442e-05, "loss": 0.153, "step": 294 },
    { "epoch": 6.8604651162790695, "grad_norm": 4.981342792510986, "learning_rate": 8.226744186046512e-05, "loss": 0.2423, "step": 295 },
    { "epoch": 6.883720930232558, "grad_norm": 2.139774799346924, "learning_rate": 8.212209302325582e-05, "loss": 0.1548, "step": 296 },
    { "epoch": 6.906976744186046, "grad_norm": 2.5026113986968994, "learning_rate": 8.197674418604652e-05, "loss": 0.0935, "step": 297 },
    { "epoch": 6.930232558139535, "grad_norm": 1.633834719657898, "learning_rate": 8.183139534883721e-05, "loss": 0.1224, "step": 298 },
    { "epoch": 6.953488372093023, "grad_norm": 4.0517168045043945, "learning_rate": 8.168604651162791e-05, "loss": 0.106, "step": 299 },
    { "epoch": 6.976744186046512, "grad_norm": 4.622942924499512, "learning_rate": 8.154069767441861e-05, "loss": 0.1923, "step": 300 },
    { "epoch": 6.976744186046512, "eval_accuracy": 0.8918032786885246, "eval_f1": 0.8912373974778747, "eval_loss": 0.276712030172348, "eval_precision": 0.8937043795620438, "eval_recall": 0.8903463427242182, "eval_runtime": 0.0737, "eval_samples_per_second": 4140.129, "eval_steps_per_second": 67.871, "step": 300 },
    { "epoch": 7.0, "grad_norm": 3.300420045852661, "learning_rate": 8.139534883720931e-05, "loss": 0.1218, "step": 301 },
    { "epoch": 7.023255813953488, "grad_norm": 2.9694247245788574, "learning_rate": 8.125000000000001e-05, "loss": 0.1067, "step": 302 },
    { "epoch": 7.046511627906977, "grad_norm": 3.805917978286743, "learning_rate": 8.11046511627907e-05, "loss": 0.1165, "step": 303 },
    { "epoch": 7.069767441860465, "grad_norm": 2.0280096530914307, "learning_rate": 8.09593023255814e-05, "loss": 0.119, "step": 304 },
    { "epoch": 7.093023255813954, "grad_norm": 5.049257278442383, "learning_rate": 8.081395348837209e-05, "loss": 0.1838, "step": 305 },
    { "epoch": 7.116279069767442, "grad_norm": 6.135448455810547, "learning_rate": 8.066860465116279e-05, "loss": 0.2193, "step": 306 },
    { "epoch": 7.1395348837209305, "grad_norm": 2.0232150554656982, "learning_rate": 8.052325581395349e-05, "loss": 0.0829, "step": 307 },
    { "epoch": 7.162790697674419, "grad_norm": 3.0096096992492676, "learning_rate": 8.037790697674419e-05, "loss": 0.1394, "step": 308 },
    { "epoch": 7.186046511627907, "grad_norm": 0.9171739220619202, "learning_rate": 8.023255813953489e-05, "loss": 0.1058, "step": 309 },
    { "epoch": 7.209302325581396, "grad_norm": 3.929137706756592, "learning_rate": 8.008720930232559e-05, "loss": 0.2189, "step": 310 },
    { "epoch": 7.232558139534884, "grad_norm": 1.1936514377593994, "learning_rate": 7.994186046511629e-05, "loss": 0.0981, "step": 311 },
    { "epoch": 7.2558139534883725, "grad_norm": 2.542099714279175, "learning_rate": 7.979651162790697e-05, "loss": 0.1052, "step": 312 },
    { "epoch": 7.27906976744186, "grad_norm": 2.42411732673645, "learning_rate": 7.965116279069767e-05, "loss": 0.0834, "step": 313 },
    { "epoch": 7.3023255813953485, "grad_norm": 1.07964289188385, "learning_rate": 7.950581395348837e-05, "loss": 0.0684, "step": 314 },
    { "epoch": 7.325581395348837, "grad_norm": 0.9596773386001587, "learning_rate": 7.936046511627907e-05, "loss": 0.0791, "step": 315 },
    { "epoch": 7.348837209302325, "grad_norm": 3.508575201034546, "learning_rate": 7.921511627906977e-05, "loss": 0.145, "step": 316 },
    { "epoch": 7.372093023255814, "grad_norm": 3.8409645557403564, "learning_rate": 7.906976744186047e-05, "loss": 0.1057, "step": 317 },
    { "epoch": 7.395348837209302, "grad_norm": 5.247814655303955, "learning_rate": 7.892441860465116e-05, "loss": 0.0731, "step": 318 },
    { "epoch": 7.4186046511627906, "grad_norm": 2.5410549640655518, "learning_rate": 7.877906976744186e-05, "loss": 0.2125, "step": 319 },
    { "epoch": 7.441860465116279, "grad_norm": 3.2063095569610596, "learning_rate": 7.863372093023256e-05, "loss": 0.0896, "step": 320 },
    { "epoch": 7.441860465116279, "eval_accuracy": 0.9114754098360656, "eval_f1": 0.9114411381991419, "eval_loss": 0.22706902027130127, "eval_precision": 0.9115497076023391, "eval_recall": 0.9122943051606789, "eval_runtime": 0.0744, "eval_samples_per_second": 4097.456, "eval_steps_per_second": 67.171, "step": 320 },
    { "epoch": 7.465116279069767, "grad_norm": 1.1547659635543823, "learning_rate": 7.848837209302326e-05, "loss": 0.0425, "step": 321 },
    { "epoch": 7.488372093023256, "grad_norm": 10.542337417602539, "learning_rate": 7.834302325581395e-05, "loss": 0.4056, "step": 322 },
    { "epoch": 7.511627906976744, "grad_norm": 6.774454593658447, "learning_rate": 7.819767441860465e-05, "loss": 0.106, "step": 323 },
    { "epoch": 7.534883720930233, "grad_norm": 3.680190086364746, "learning_rate": 7.805232558139536e-05, "loss": 0.1208, "step": 324 },
    { "epoch": 7.558139534883721, "grad_norm": 3.1805880069732666, "learning_rate": 7.790697674418606e-05, "loss": 0.0968, "step": 325 },
    { "epoch": 7.5813953488372094, "grad_norm": 3.887376070022583, "learning_rate": 7.776162790697676e-05, "loss": 0.3548, "step": 326 },
    { "epoch": 7.604651162790698, "grad_norm": 2.4842185974121094, "learning_rate": 7.761627906976745e-05, "loss": 0.0782, "step": 327 },
    { "epoch": 7.627906976744186, "grad_norm": 4.861859321594238, "learning_rate": 7.747093023255815e-05, "loss": 0.1529, "step": 328 },
    { "epoch": 7.651162790697675, "grad_norm": 4.307983875274658, "learning_rate": 7.732558139534884e-05, "loss": 0.1938, "step": 329 },
    { "epoch": 7.674418604651163, "grad_norm": 6.8119587898254395, "learning_rate": 7.718023255813954e-05, "loss": 0.1118, "step": 330 },
    { "epoch": 7.6976744186046515, "grad_norm": 1.8230775594711304, "learning_rate": 7.703488372093024e-05, "loss": 0.0551, "step": 331 },
    { "epoch": 7.720930232558139, "grad_norm": 1.6934735774993896, "learning_rate": 7.688953488372094e-05, "loss": 0.0629, "step": 332 },
    { "epoch": 7.7441860465116275, "grad_norm": 2.537760019302368, "learning_rate": 7.674418604651163e-05, "loss": 0.1084, "step": 333 },
    { "epoch": 7.767441860465116, "grad_norm": 1.6460330486297607, "learning_rate": 7.659883720930233e-05, "loss": 0.0927, "step": 334 },
    { "epoch": 7.790697674418604, "grad_norm": 1.8621909618377686, "learning_rate": 7.645348837209303e-05, "loss": 0.107, "step": 335 },
    { "epoch": 7.813953488372093, "grad_norm": 7.628468036651611, "learning_rate": 7.630813953488373e-05, "loss": 0.1038, "step": 336 },
    { "epoch": 7.837209302325581, "grad_norm": 2.3361170291900635, "learning_rate": 7.616279069767443e-05, "loss": 0.1017, "step": 337 },
    {
      "epoch": 7.8604651162790695,
|
"grad_norm": 1.8995561599731445, |
|
"learning_rate": 7.601744186046513e-05, |
|
"loss": 0.0414, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 7.883720930232558, |
|
"grad_norm": 5.01551628112793, |
|
"learning_rate": 7.587209302325581e-05, |
|
"loss": 0.1229, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 7.906976744186046, |
|
"grad_norm": 4.123791694641113, |
|
"learning_rate": 7.572674418604651e-05, |
|
"loss": 0.099, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 7.906976744186046, |
|
"eval_accuracy": 0.9016393442622951, |
|
"eval_f1": 0.9016129032258065, |
|
"eval_loss": 0.22697412967681885, |
|
"eval_precision": 0.9018878472520857, |
|
"eval_recall": 0.9025803394503318, |
|
"eval_runtime": 0.0787, |
|
"eval_samples_per_second": 3874.417, |
|
"eval_steps_per_second": 63.515, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 7.930232558139535, |
|
"grad_norm": 1.9877846240997314, |
|
"learning_rate": 7.558139534883721e-05, |
|
"loss": 0.0625, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 7.953488372093023, |
|
"grad_norm": 1.6333461999893188, |
|
"learning_rate": 7.543604651162791e-05, |
|
"loss": 0.051, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 7.976744186046512, |
|
"grad_norm": 8.968517303466797, |
|
"learning_rate": 7.529069767441861e-05, |
|
"loss": 0.0781, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 2.306673765182495, |
|
"learning_rate": 7.514534883720931e-05, |
|
"loss": 0.0684, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 8.023255813953488, |
|
"grad_norm": 5.471800804138184, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.1, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 8.046511627906977, |
|
"grad_norm": 6.212913513183594, |
|
"learning_rate": 7.48546511627907e-05, |
|
"loss": 0.0966, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 8.069767441860465, |
|
"grad_norm": 7.509064197540283, |
|
"learning_rate": 7.47093023255814e-05, |
|
"loss": 0.2218, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 8.093023255813954, |
|
"grad_norm": 5.755818843841553, |
|
"learning_rate": 7.456395348837209e-05, |
|
"loss": 0.2037, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 8.116279069767442, |
|
"grad_norm": 2.3190879821777344, |
|
"learning_rate": 7.441860465116279e-05, |
|
"loss": 0.1259, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 8.13953488372093, |
|
"grad_norm": 0.9036604166030884, |
|
"learning_rate": 7.427325581395349e-05, |
|
"loss": 0.0662, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 8.162790697674419, |
|
"grad_norm": 2.2601730823516846, |
|
"learning_rate": 7.412790697674419e-05, |
|
"loss": 0.0833, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 8.186046511627907, |
|
"grad_norm": 2.334266185760498, |
|
"learning_rate": 7.398255813953489e-05, |
|
"loss": 0.0482, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 8.209302325581396, |
|
"grad_norm": 5.374472141265869, |
|
"learning_rate": 7.383720930232558e-05, |
|
"loss": 0.1771, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 8.232558139534884, |
|
"grad_norm": 5.360229969024658, |
|
"learning_rate": 7.369186046511628e-05, |
|
"loss": 0.2253, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 8.255813953488373, |
|
"grad_norm": 2.7183334827423096, |
|
"learning_rate": 7.354651162790698e-05, |
|
"loss": 0.13, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 8.279069767441861, |
|
"grad_norm": 1.2531534433364868, |
|
"learning_rate": 7.340116279069768e-05, |
|
"loss": 0.0898, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 8.30232558139535, |
|
"grad_norm": 5.129657745361328, |
|
"learning_rate": 7.325581395348837e-05, |
|
"loss": 0.0884, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 8.325581395348838, |
|
"grad_norm": 0.5411895513534546, |
|
"learning_rate": 7.311046511627907e-05, |
|
"loss": 0.0227, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 8.348837209302326, |
|
"grad_norm": 3.1815500259399414, |
|
"learning_rate": 7.296511627906976e-05, |
|
"loss": 0.1153, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 8.372093023255815, |
|
"grad_norm": 3.956552743911743, |
|
"learning_rate": 7.281976744186046e-05, |
|
"loss": 0.1048, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 8.372093023255815, |
|
"eval_accuracy": 0.8918032786885246, |
|
"eval_f1": 0.8917846660000646, |
|
"eval_loss": 0.2492106705904007, |
|
"eval_precision": 0.8922580645161291, |
|
"eval_recall": 0.8928663737399845, |
|
"eval_runtime": 0.0761, |
|
"eval_samples_per_second": 4010.404, |
|
"eval_steps_per_second": 65.744, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 8.395348837209303, |
|
"grad_norm": 3.400632619857788, |
|
"learning_rate": 7.267441860465116e-05, |
|
"loss": 0.1008, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 8.418604651162791, |
|
"grad_norm": 4.204819202423096, |
|
"learning_rate": 7.252906976744186e-05, |
|
"loss": 0.0525, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 8.44186046511628, |
|
"grad_norm": 4.265756130218506, |
|
"learning_rate": 7.238372093023256e-05, |
|
"loss": 0.1789, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 8.465116279069768, |
|
"grad_norm": 3.7337512969970703, |
|
"learning_rate": 7.223837209302326e-05, |
|
"loss": 0.2187, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 8.488372093023255, |
|
"grad_norm": 7.506768226623535, |
|
"learning_rate": 7.209302325581396e-05, |
|
"loss": 0.1643, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 8.511627906976745, |
|
"grad_norm": 2.040639877319336, |
|
"learning_rate": 7.194767441860464e-05, |
|
"loss": 0.0645, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 8.534883720930232, |
|
"grad_norm": 3.809933662414551, |
|
"learning_rate": 7.180232558139535e-05, |
|
"loss": 0.1413, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 8.55813953488372, |
|
"grad_norm": 4.585285186767578, |
|
"learning_rate": 7.165697674418605e-05, |
|
"loss": 0.073, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 8.581395348837209, |
|
"grad_norm": 1.4243372678756714, |
|
"learning_rate": 7.151162790697675e-05, |
|
"loss": 0.0358, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 8.604651162790697, |
|
"grad_norm": 7.1119513511657715, |
|
"learning_rate": 7.136627906976745e-05, |
|
"loss": 0.1204, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 8.627906976744185, |
|
"grad_norm": 4.922991752624512, |
|
"learning_rate": 7.122093023255815e-05, |
|
"loss": 0.1183, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 8.651162790697674, |
|
"grad_norm": 2.3582427501678467, |
|
"learning_rate": 7.107558139534885e-05, |
|
"loss": 0.1057, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 8.674418604651162, |
|
"grad_norm": 1.6656956672668457, |
|
"learning_rate": 7.093023255813955e-05, |
|
"loss": 0.0434, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 8.69767441860465, |
|
"grad_norm": 3.274289608001709, |
|
"learning_rate": 7.078488372093023e-05, |
|
"loss": 0.1541, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 8.720930232558139, |
|
"grad_norm": 4.472874641418457, |
|
"learning_rate": 7.063953488372093e-05, |
|
"loss": 0.1322, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 8.744186046511627, |
|
"grad_norm": 2.455209493637085, |
|
"learning_rate": 7.049418604651163e-05, |
|
"loss": 0.1081, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 8.767441860465116, |
|
"grad_norm": 2.7206978797912598, |
|
"learning_rate": 7.034883720930233e-05, |
|
"loss": 0.1198, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 8.790697674418604, |
|
"grad_norm": 6.333116054534912, |
|
"learning_rate": 7.020348837209303e-05, |
|
"loss": 0.1965, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 8.813953488372093, |
|
"grad_norm": 1.0274301767349243, |
|
"learning_rate": 7.005813953488373e-05, |
|
"loss": 0.0388, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 8.837209302325581, |
|
"grad_norm": 4.29472541809082, |
|
"learning_rate": 6.991279069767443e-05, |
|
"loss": 0.0699, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 8.837209302325581, |
|
"eval_accuracy": 0.898360655737705, |
|
"eval_f1": 0.8978290703580035, |
|
"eval_loss": 0.26147955656051636, |
|
"eval_precision": 0.9003302050747306, |
|
"eval_recall": 0.8969156543465151, |
|
"eval_runtime": 0.0748, |
|
"eval_samples_per_second": 4079.802, |
|
"eval_steps_per_second": 66.882, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 8.86046511627907, |
|
"grad_norm": 3.4688355922698975, |
|
"learning_rate": 6.976744186046513e-05, |
|
"loss": 0.1465, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 8.883720930232558, |
|
"grad_norm": 4.693603992462158, |
|
"learning_rate": 6.962209302325582e-05, |
|
"loss": 0.0675, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 8.906976744186046, |
|
"grad_norm": 1.98982572555542, |
|
"learning_rate": 6.947674418604651e-05, |
|
"loss": 0.1484, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 8.930232558139535, |
|
"grad_norm": 1.312723994255066, |
|
"learning_rate": 6.933139534883721e-05, |
|
"loss": 0.0458, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 8.953488372093023, |
|
"grad_norm": 2.106917142868042, |
|
"learning_rate": 6.918604651162791e-05, |
|
"loss": 0.0417, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 8.976744186046512, |
|
"grad_norm": 4.182035446166992, |
|
"learning_rate": 6.90406976744186e-05, |
|
"loss": 0.0459, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"grad_norm": 2.1285786628723145, |
|
"learning_rate": 6.88953488372093e-05, |
|
"loss": 0.0743, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 9.023255813953488, |
|
"grad_norm": 4.470812797546387, |
|
"learning_rate": 6.875e-05, |
|
"loss": 0.0973, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 9.046511627906977, |
|
"grad_norm": 1.6776498556137085, |
|
"learning_rate": 6.86046511627907e-05, |
|
"loss": 0.1894, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 9.069767441860465, |
|
"grad_norm": 3.9984216690063477, |
|
"learning_rate": 6.84593023255814e-05, |
|
"loss": 0.1704, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 9.093023255813954, |
|
"grad_norm": 2.3983538150787354, |
|
"learning_rate": 6.83139534883721e-05, |
|
"loss": 0.0736, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 9.116279069767442, |
|
"grad_norm": 2.989044427871704, |
|
"learning_rate": 6.81686046511628e-05, |
|
"loss": 0.0892, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 9.13953488372093, |
|
"grad_norm": 3.356992483139038, |
|
"learning_rate": 6.802325581395348e-05, |
|
"loss": 0.0999, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 9.162790697674419, |
|
"grad_norm": 2.2563319206237793, |
|
"learning_rate": 6.787790697674418e-05, |
|
"loss": 0.0452, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 9.186046511627907, |
|
"grad_norm": 5.215000152587891, |
|
"learning_rate": 6.773255813953488e-05, |
|
"loss": 0.0793, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 9.209302325581396, |
|
"grad_norm": 5.427519798278809, |
|
"learning_rate": 6.758720930232558e-05, |
|
"loss": 0.0755, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 9.232558139534884, |
|
"grad_norm": 7.729520797729492, |
|
"learning_rate": 6.744186046511628e-05, |
|
"loss": 0.2358, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 9.255813953488373, |
|
"grad_norm": 5.97319221496582, |
|
"learning_rate": 6.729651162790698e-05, |
|
"loss": 0.1829, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 9.279069767441861, |
|
"grad_norm": 4.662299633026123, |
|
"learning_rate": 6.715116279069768e-05, |
|
"loss": 0.0549, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 9.30232558139535, |
|
"grad_norm": 5.684605598449707, |
|
"learning_rate": 6.700581395348838e-05, |
|
"loss": 0.1466, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 9.30232558139535, |
|
"eval_accuracy": 0.9147540983606557, |
|
"eval_f1": 0.9145474137931033, |
|
"eval_loss": 0.22707484662532806, |
|
"eval_precision": 0.9148550724637681, |
|
"eval_recall": 0.9143189454639442, |
|
"eval_runtime": 0.0754, |
|
"eval_samples_per_second": 4044.013, |
|
"eval_steps_per_second": 66.295, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 9.325581395348838, |
|
"grad_norm": 8.901346206665039, |
|
"learning_rate": 6.686046511627908e-05, |
|
"loss": 0.1351, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 9.348837209302326, |
|
"grad_norm": 3.3197011947631836, |
|
"learning_rate": 6.671511627906976e-05, |
|
"loss": 0.0962, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 9.372093023255815, |
|
"grad_norm": 5.270369529724121, |
|
"learning_rate": 6.656976744186046e-05, |
|
"loss": 0.0862, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 9.395348837209303, |
|
"grad_norm": 1.4588712453842163, |
|
"learning_rate": 6.642441860465116e-05, |
|
"loss": 0.0286, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 9.418604651162791, |
|
"grad_norm": 7.928572177886963, |
|
"learning_rate": 6.627906976744186e-05, |
|
"loss": 0.1137, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 9.44186046511628, |
|
"grad_norm": 4.6285223960876465, |
|
"learning_rate": 6.613372093023256e-05, |
|
"loss": 0.0907, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 9.465116279069768, |
|
"grad_norm": 1.1047800779342651, |
|
"learning_rate": 6.598837209302326e-05, |
|
"loss": 0.0279, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 9.488372093023255, |
|
"grad_norm": 2.0423073768615723, |
|
"learning_rate": 6.584302325581395e-05, |
|
"loss": 0.0528, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 9.511627906976745, |
|
"grad_norm": 3.1448893547058105, |
|
"learning_rate": 6.569767441860465e-05, |
|
"loss": 0.0557, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 9.534883720930232, |
|
"grad_norm": 3.5887062549591064, |
|
"learning_rate": 6.555232558139535e-05, |
|
"loss": 0.0858, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 9.55813953488372, |
|
"grad_norm": 4.293898105621338, |
|
"learning_rate": 6.540697674418605e-05, |
|
"loss": 0.0547, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 9.581395348837209, |
|
"grad_norm": 3.3504300117492676, |
|
"learning_rate": 6.526162790697675e-05, |
|
"loss": 0.1361, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 9.604651162790697, |
|
"grad_norm": 0.5247292518615723, |
|
"learning_rate": 6.511627906976745e-05, |
|
"loss": 0.0168, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 9.627906976744185, |
|
"grad_norm": 0.9081286191940308, |
|
"learning_rate": 6.497093023255815e-05, |
|
"loss": 0.0337, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 9.651162790697674, |
|
"grad_norm": 5.076108932495117, |
|
"learning_rate": 6.482558139534885e-05, |
|
"loss": 0.0972, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 9.674418604651162, |
|
"grad_norm": 2.2447941303253174, |
|
"learning_rate": 6.468023255813955e-05, |
|
"loss": 0.0468, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 9.69767441860465, |
|
"grad_norm": 6.019301891326904, |
|
"learning_rate": 6.453488372093024e-05, |
|
"loss": 0.0967, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 9.720930232558139, |
|
"grad_norm": 1.4316606521606445, |
|
"learning_rate": 6.438953488372094e-05, |
|
"loss": 0.0666, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 9.744186046511627, |
|
"grad_norm": 1.9364207983016968, |
|
"learning_rate": 6.424418604651163e-05, |
|
"loss": 0.1047, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 9.767441860465116, |
|
"grad_norm": 4.00551700592041, |
|
"learning_rate": 6.409883720930233e-05, |
|
"loss": 0.0663, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 9.767441860465116, |
|
"eval_accuracy": 0.9114754098360656, |
|
"eval_f1": 0.9106105834464043, |
|
"eval_loss": 0.26964467763900757, |
|
"eval_precision": 0.9181201550387597, |
|
"eval_recall": 0.9089342638063238, |
|
"eval_runtime": 0.0726, |
|
"eval_samples_per_second": 4201.095, |
|
"eval_steps_per_second": 68.87, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 9.790697674418604, |
|
"grad_norm": 3.918790817260742, |
|
"learning_rate": 6.395348837209303e-05, |
|
"loss": 0.0773, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 9.813953488372093, |
|
"grad_norm": 4.5966901779174805, |
|
"learning_rate": 6.380813953488373e-05, |
|
"loss": 0.1099, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 9.837209302325581, |
|
"grad_norm": 3.332273244857788, |
|
"learning_rate": 6.366279069767442e-05, |
|
"loss": 0.0697, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 9.86046511627907, |
|
"grad_norm": 3.300297737121582, |
|
"learning_rate": 6.351744186046512e-05, |
|
"loss": 0.1235, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 9.883720930232558, |
|
"grad_norm": 1.9253029823303223, |
|
"learning_rate": 6.337209302325582e-05, |
|
"loss": 0.0276, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 9.906976744186046, |
|
"grad_norm": 2.729339599609375, |
|
"learning_rate": 6.322674418604652e-05, |
|
"loss": 0.2399, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 9.930232558139535, |
|
"grad_norm": 2.6790566444396973, |
|
"learning_rate": 6.308139534883722e-05, |
|
"loss": 0.0897, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 9.953488372093023, |
|
"grad_norm": 2.726901054382324, |
|
"learning_rate": 6.29360465116279e-05, |
|
"loss": 0.0768, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 9.976744186046512, |
|
"grad_norm": 2.4175329208374023, |
|
"learning_rate": 6.27906976744186e-05, |
|
"loss": 0.0943, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 4.533559799194336, |
|
"learning_rate": 6.26453488372093e-05, |
|
"loss": 0.127, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 10.023255813953488, |
|
"grad_norm": 3.8240880966186523, |
|
"learning_rate": 6.25e-05, |
|
"loss": 0.083, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 10.046511627906977, |
|
"grad_norm": 2.4785892963409424, |
|
"learning_rate": 6.23546511627907e-05, |
|
"loss": 0.0292, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 10.069767441860465, |
|
"grad_norm": 1.059528112411499, |
|
"learning_rate": 6.22093023255814e-05, |
|
"loss": 0.0619, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 10.093023255813954, |
|
"grad_norm": 2.8850722312927246, |
|
"learning_rate": 6.20639534883721e-05, |
|
"loss": 0.1071, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 10.116279069767442, |
|
"grad_norm": 1.9426606893539429, |
|
"learning_rate": 6.19186046511628e-05, |
|
"loss": 0.0988, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 10.13953488372093, |
|
"grad_norm": 2.1318485736846924, |
|
"learning_rate": 6.17732558139535e-05, |
|
"loss": 0.1346, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 10.162790697674419, |
|
"grad_norm": 2.793272018432617, |
|
"learning_rate": 6.162790697674418e-05, |
|
"loss": 0.0296, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 10.186046511627907, |
|
"grad_norm": 1.0038514137268066, |
|
"learning_rate": 6.148255813953488e-05, |
|
"loss": 0.0576, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 10.209302325581396, |
|
"grad_norm": 2.623504400253296, |
|
"learning_rate": 6.133720930232558e-05, |
|
"loss": 0.0607, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 10.232558139534884, |
|
"grad_norm": 1.1035139560699463, |
|
"learning_rate": 6.119186046511628e-05, |
|
"loss": 0.0473, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 10.232558139534884, |
|
"eval_accuracy": 0.9049180327868852, |
|
"eval_f1": 0.9045074224021592, |
|
"eval_loss": 0.2681082785129547, |
|
"eval_precision": 0.9062147872063795, |
|
"eval_recall": 0.9037649694150082, |
|
"eval_runtime": 0.0732, |
|
"eval_samples_per_second": 4164.7, |
|
"eval_steps_per_second": 68.274, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 10.255813953488373, |
|
"grad_norm": 4.833049774169922, |
|
"learning_rate": 6.104651162790698e-05, |
|
"loss": 0.0978, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 10.279069767441861, |
|
"grad_norm": 0.8670517802238464, |
|
"learning_rate": 6.0901162790697675e-05, |
|
"loss": 0.0284, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 10.30232558139535, |
|
"grad_norm": 4.661880016326904, |
|
"learning_rate": 6.0755813953488374e-05, |
|
"loss": 0.1335, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 10.325581395348838, |
|
"grad_norm": 4.113884449005127, |
|
"learning_rate": 6.0610465116279066e-05, |
|
"loss": 0.0624, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 10.348837209302326, |
|
"grad_norm": 4.014869689941406, |
|
"learning_rate": 6.0465116279069765e-05, |
|
"loss": 0.075, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 10.372093023255815, |
|
"grad_norm": 0.5572329759597778, |
|
"learning_rate": 6.0319767441860464e-05, |
|
"loss": 0.0172, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 10.395348837209303, |
|
"grad_norm": 2.2173776626586914, |
|
"learning_rate": 6.017441860465116e-05, |
|
"loss": 0.0753, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 10.418604651162791, |
|
"grad_norm": 4.567299842834473, |
|
"learning_rate": 6.002906976744186e-05, |
|
"loss": 0.1678, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 10.44186046511628, |
|
"grad_norm": 2.608336925506592, |
|
"learning_rate": 5.9883720930232554e-05, |
|
"loss": 0.1871, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 10.465116279069768, |
|
"grad_norm": 2.773268699645996, |
|
"learning_rate": 5.973837209302325e-05, |
|
"loss": 0.1034, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 10.488372093023255, |
|
"grad_norm": 4.1804304122924805, |
|
"learning_rate": 5.959302325581395e-05, |
|
"loss": 0.1966, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 10.511627906976745, |
|
"grad_norm": 2.119896650314331, |
|
"learning_rate": 5.944767441860465e-05, |
|
"loss": 0.0443, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 10.534883720930232, |
|
"grad_norm": 2.4072678089141846, |
|
"learning_rate": 5.9302325581395356e-05, |
|
"loss": 0.0509, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 10.55813953488372, |
|
"grad_norm": 2.580864906311035, |
|
"learning_rate": 5.9156976744186055e-05, |
|
"loss": 0.1404, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 10.581395348837209, |
|
"grad_norm": 1.998783826828003, |
|
"learning_rate": 5.9011627906976754e-05, |
|
"loss": 0.0546, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 10.604651162790697, |
|
"grad_norm": 1.9264705181121826, |
|
"learning_rate": 5.886627906976745e-05, |
|
"loss": 0.0753, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 10.627906976744185, |
|
"grad_norm": 1.8207919597625732, |
|
"learning_rate": 5.8720930232558145e-05, |
|
"loss": 0.0783, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 10.651162790697674, |
|
"grad_norm": 3.059663772583008, |
|
"learning_rate": 5.8575581395348844e-05, |
|
"loss": 0.0829, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 10.674418604651162, |
|
"grad_norm": 2.6100552082061768, |
|
"learning_rate": 5.843023255813954e-05, |
|
"loss": 0.1253, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 10.69767441860465, |
|
"grad_norm": 2.3080873489379883, |
|
"learning_rate": 5.828488372093024e-05, |
|
"loss": 0.0337, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 10.69767441860465, |
|
"eval_accuracy": 0.8918032786885246, |
|
"eval_f1": 0.8916355336398186, |
|
"eval_loss": 0.2402210682630539, |
|
"eval_precision": 0.8915439593558943, |
|
"eval_recall": 0.8917463599551995, |
|
"eval_runtime": 0.0746, |
|
"eval_samples_per_second": 4086.997, |
|
"eval_steps_per_second": 67.0, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 10.720930232558139, |
|
"grad_norm": 1.366176724433899, |
|
"learning_rate": 5.8139534883720933e-05, |
|
"loss": 0.0355, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 10.744186046511627, |
|
"grad_norm": 3.6105799674987793, |
|
"learning_rate": 5.799418604651163e-05, |
|
"loss": 0.1579, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 10.767441860465116, |
|
"grad_norm": 2.1533732414245605, |
|
"learning_rate": 5.784883720930233e-05, |
|
"loss": 0.0605, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 10.790697674418604, |
|
"grad_norm": 2.887242078781128, |
|
"learning_rate": 5.770348837209303e-05, |
|
"loss": 0.0787, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 10.813953488372093, |
|
"grad_norm": 4.048674583435059, |
|
"learning_rate": 5.755813953488373e-05, |
|
"loss": 0.0646, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 10.837209302325581, |
|
"grad_norm": 4.225156307220459, |
|
"learning_rate": 5.741279069767442e-05, |
|
"loss": 0.0449, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 10.86046511627907, |
|
"grad_norm": 5.0084123611450195, |
|
"learning_rate": 5.726744186046512e-05, |
|
"loss": 0.0798, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 10.883720930232558, |
|
"grad_norm": 2.2451436519622803, |
|
"learning_rate": 5.712209302325582e-05, |
|
"loss": 0.1, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 10.906976744186046, |
|
"grad_norm": 2.683537006378174, |
|
"learning_rate": 5.697674418604652e-05, |
|
"loss": 0.0875, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 10.930232558139535, |
|
"grad_norm": 2.50972580909729, |
|
"learning_rate": 5.683139534883721e-05, |
|
"loss": 0.0313, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 10.953488372093023, |
|
"grad_norm": 8.650345802307129, |
|
"learning_rate": 5.668604651162791e-05, |
|
"loss": 0.0685, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 10.976744186046512, |
|
"grad_norm": 2.9768123626708984, |
|
"learning_rate": 5.654069767441861e-05, |
|
"loss": 0.031, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"grad_norm": 2.2081665992736816, |
|
"learning_rate": 5.6395348837209306e-05, |
|
"loss": 0.0335, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 11.023255813953488, |
|
"grad_norm": 4.979495048522949, |
|
"learning_rate": 5.6250000000000005e-05, |
|
"loss": 0.0331, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 11.046511627906977, |
|
"grad_norm": 2.000579833984375, |
|
"learning_rate": 5.61046511627907e-05, |
|
"loss": 0.0472, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 11.069767441860465, |
|
"grad_norm": 2.3896875381469727, |
|
"learning_rate": 5.5959302325581396e-05, |
|
"loss": 0.0745, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 11.093023255813954, |
|
"grad_norm": 2.0320870876312256, |
|
"learning_rate": 5.5813953488372095e-05, |
|
"loss": 0.0634, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 11.116279069767442, |
|
"grad_norm": 2.134622812271118, |
|
"learning_rate": 5.5668604651162794e-05, |
|
"loss": 0.0387, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 11.13953488372093, |
|
"grad_norm": 1.0899347066879272, |
|
"learning_rate": 5.552325581395349e-05, |
|
"loss": 0.0455, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 11.162790697674419, |
|
"grad_norm": 0.905604898929596, |
|
"learning_rate": 5.5377906976744185e-05, |
|
"loss": 0.0156, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 11.162790697674419, |
|
"eval_accuracy": 0.9016393442622951, |
|
"eval_f1": 0.9008624425760596, |
|
"eval_loss": 0.3055068254470825, |
|
"eval_precision": 0.906113154668068, |
|
"eval_recall": 0.8995003015421728, |
|
"eval_runtime": 0.0798, |
|
"eval_samples_per_second": 3820.2, |
|
"eval_steps_per_second": 62.626, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 11.186046511627907, |
|
"grad_norm": 1.7722190618515015, |
|
"learning_rate": 5.5232558139534884e-05, |
|
"loss": 0.0672, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 11.209302325581396, |
|
"grad_norm": 3.3152947425842285, |
|
"learning_rate": 5.508720930232558e-05, |
|
"loss": 0.1093, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 11.232558139534884, |
|
"grad_norm": 3.6544511318206787, |
|
"learning_rate": 5.494186046511628e-05, |
|
"loss": 0.0894, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 11.255813953488373, |
|
"grad_norm": 4.428676128387451, |
|
"learning_rate": 5.4796511627906974e-05, |
|
"loss": 0.0495, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 11.279069767441861, |
|
"grad_norm": 3.5235519409179688, |
|
"learning_rate": 5.465116279069767e-05, |
|
"loss": 0.0313, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 11.30232558139535, |
|
"grad_norm": 1.8089061975479126, |
|
"learning_rate": 5.450581395348837e-05, |
|
"loss": 0.0294, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 11.325581395348838, |
|
"grad_norm": 9.173127174377441, |
|
"learning_rate": 5.436046511627907e-05, |
|
"loss": 0.0559, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 11.348837209302326, |
|
"grad_norm": 10.084165573120117, |
|
"learning_rate": 5.421511627906977e-05, |
|
"loss": 0.0759, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 11.372093023255815, |
|
"grad_norm": 0.5530898571014404, |
|
"learning_rate": 5.406976744186046e-05, |
|
"loss": 0.0202, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 11.395348837209303, |
|
"grad_norm": 4.215567588806152, |
|
"learning_rate": 5.392441860465116e-05, |
|
"loss": 0.1209, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 11.418604651162791, |
|
"grad_norm": 0.7583315372467041, |
|
"learning_rate": 5.377906976744186e-05, |
|
"loss": 0.0054, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 11.44186046511628, |
|
"grad_norm": 0.2435784786939621, |
|
"learning_rate": 5.363372093023256e-05, |
|
"loss": 0.0064, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 11.465116279069768, |
|
"grad_norm": 2.534301519393921, |
|
"learning_rate": 5.348837209302326e-05, |
|
"loss": 0.0733, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 11.488372093023255, |
|
"grad_norm": 3.94118332862854, |
|
"learning_rate": 5.334302325581395e-05, |
|
"loss": 0.0812, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 11.511627906976745, |
|
"grad_norm": 3.799800157546997, |
|
"learning_rate": 5.319767441860465e-05, |
|
"loss": 0.1377, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 11.534883720930232, |
|
"grad_norm": 0.16743521392345428, |
|
"learning_rate": 5.305232558139536e-05, |
|
"loss": 0.0054, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 11.55813953488372, |
|
"grad_norm": 1.3862305879592896, |
|
"learning_rate": 5.290697674418605e-05, |
|
"loss": 0.0364, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 11.581395348837209, |
|
"grad_norm": 3.8730082511901855, |
|
"learning_rate": 5.276162790697675e-05, |
|
"loss": 0.0827, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 11.604651162790697, |
|
"grad_norm": 5.6961188316345215, |
|
"learning_rate": 5.261627906976745e-05, |
|
"loss": 0.0936, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 11.627906976744185, |
|
"grad_norm": 0.9591822028160095, |
|
"learning_rate": 5.247093023255815e-05, |
|
"loss": 0.0146, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 11.627906976744185, |
|
"eval_accuracy": 0.9016393442622951, |
|
"eval_f1": 0.900974025974026, |
|
"eval_loss": 0.32393914461135864, |
|
"eval_precision": 0.9050144016758314, |
|
"eval_recall": 0.899780304988369, |
|
"eval_runtime": 0.075, |
|
"eval_samples_per_second": 4066.651, |
|
"eval_steps_per_second": 66.666, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 11.651162790697674, |
|
"grad_norm": 5.92799186706543, |
|
"learning_rate": 5.232558139534884e-05, |
|
"loss": 0.0904, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 11.674418604651162, |
|
"grad_norm": 9.12846851348877, |
|
"learning_rate": 5.218023255813954e-05, |
|
"loss": 0.322, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 11.69767441860465, |
|
"grad_norm": 2.95119571685791, |
|
"learning_rate": 5.203488372093024e-05, |
|
"loss": 0.0175, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 11.720930232558139, |
|
"grad_norm": 3.9121649265289307, |
|
"learning_rate": 5.188953488372094e-05, |
|
"loss": 0.0733, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 11.744186046511627, |
|
"grad_norm": 2.7404439449310303, |
|
"learning_rate": 5.1744186046511636e-05, |
|
"loss": 0.0178, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 11.767441860465116, |
|
"grad_norm": 4.207398891448975, |
|
"learning_rate": 5.159883720930233e-05, |
|
"loss": 0.1179, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 11.790697674418604, |
|
"grad_norm": 5.63287878036499, |
|
"learning_rate": 5.145348837209303e-05, |
|
"loss": 0.0728, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 11.813953488372093, |
|
"grad_norm": 4.2650556564331055, |
|
"learning_rate": 5.1308139534883726e-05, |
|
"loss": 0.0409, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 11.837209302325581, |
|
"grad_norm": 4.405393123626709, |
|
"learning_rate": 5.1162790697674425e-05, |
|
"loss": 0.1272, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 11.86046511627907, |
|
"grad_norm": 3.2369868755340576, |
|
"learning_rate": 5.1017441860465124e-05, |
|
"loss": 0.1354, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 11.883720930232558, |
|
"grad_norm": 2.492950439453125, |
|
"learning_rate": 5.0872093023255816e-05, |
|
"loss": 0.116, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 11.906976744186046, |
|
"grad_norm": 5.160562992095947, |
|
"learning_rate": 5.0726744186046515e-05, |
|
"loss": 0.0813, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 11.930232558139535, |
|
"grad_norm": 3.234938383102417, |
|
"learning_rate": 5.0581395348837214e-05, |
|
"loss": 0.041, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 11.953488372093023, |
|
"grad_norm": 2.3901174068450928, |
|
"learning_rate": 5.043604651162791e-05, |
|
"loss": 0.0285, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 11.976744186046512, |
|
"grad_norm": 1.9587090015411377, |
|
"learning_rate": 5.0290697674418605e-05, |
|
"loss": 0.0191, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"grad_norm": 1.5882123708724976, |
|
"learning_rate": 5.0145348837209304e-05, |
|
"loss": 0.0292, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 12.023255813953488, |
|
"grad_norm": 1.0756316184997559, |
|
"learning_rate": 5e-05, |
|
"loss": 0.0348, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 12.046511627906977, |
|
"grad_norm": 7.145483016967773, |
|
"learning_rate": 4.98546511627907e-05, |
|
"loss": 0.1204, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 12.069767441860465, |
|
"grad_norm": 10.459267616271973, |
|
"learning_rate": 4.97093023255814e-05, |
|
"loss": 0.0682, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 12.093023255813954, |
|
"grad_norm": 2.6111533641815186, |
|
"learning_rate": 4.956395348837209e-05, |
|
"loss": 0.1088, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 12.093023255813954, |
|
"eval_accuracy": 0.9081967213114754, |
|
"eval_f1": 0.907471613070989, |
|
"eval_loss": 0.29710283875465393, |
|
"eval_precision": 0.9127912068663514, |
|
"eval_recall": 0.9060696131644697, |
|
"eval_runtime": 0.0726, |
|
"eval_samples_per_second": 4200.336, |
|
"eval_steps_per_second": 68.858, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 12.116279069767442, |
|
"grad_norm": 1.2339119911193848, |
|
"learning_rate": 4.941860465116279e-05, |
|
"loss": 0.0222, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 12.13953488372093, |
|
"grad_norm": 1.6148200035095215, |
|
"learning_rate": 4.927325581395349e-05, |
|
"loss": 0.0968, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 12.162790697674419, |
|
"grad_norm": 2.2320408821105957, |
|
"learning_rate": 4.912790697674419e-05, |
|
"loss": 0.0742, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 12.186046511627907, |
|
"grad_norm": 1.0352402925491333, |
|
"learning_rate": 4.898255813953488e-05, |
|
"loss": 0.0287, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 12.209302325581396, |
|
"grad_norm": 3.2948336601257324, |
|
"learning_rate": 4.883720930232558e-05, |
|
"loss": 0.0945, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 12.232558139534884, |
|
"grad_norm": 0.48531392216682434, |
|
"learning_rate": 4.869186046511628e-05, |
|
"loss": 0.0119, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 12.255813953488373, |
|
"grad_norm": 3.030161142349243, |
|
"learning_rate": 4.854651162790698e-05, |
|
"loss": 0.0713, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 12.279069767441861, |
|
"grad_norm": 1.6946715116500854, |
|
"learning_rate": 4.8401162790697676e-05, |
|
"loss": 0.0322, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 12.30232558139535, |
|
"grad_norm": 4.674280643463135, |
|
"learning_rate": 4.8255813953488375e-05, |
|
"loss": 0.048, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 12.325581395348838, |
|
"grad_norm": 1.2208576202392578, |
|
"learning_rate": 4.8110465116279074e-05, |
|
"loss": 0.009, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 12.348837209302326, |
|
"grad_norm": 7.181972026824951, |
|
"learning_rate": 4.796511627906977e-05, |
|
"loss": 0.0275, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 12.372093023255815, |
|
"grad_norm": 1.5175693035125732, |
|
"learning_rate": 4.781976744186047e-05, |
|
"loss": 0.0246, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 12.395348837209303, |
|
"grad_norm": 5.098257064819336, |
|
"learning_rate": 4.7674418604651164e-05, |
|
"loss": 0.0758, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 12.418604651162791, |
|
"grad_norm": 2.7552084922790527, |
|
"learning_rate": 4.752906976744186e-05, |
|
"loss": 0.0558, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 12.44186046511628, |
|
"grad_norm": 2.456735134124756, |
|
"learning_rate": 4.738372093023256e-05, |
|
"loss": 0.0484, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 12.465116279069768, |
|
"grad_norm": 1.6237318515777588, |
|
"learning_rate": 4.723837209302326e-05, |
|
"loss": 0.08, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 12.488372093023255, |
|
"grad_norm": 3.8842220306396484, |
|
"learning_rate": 4.709302325581396e-05, |
|
"loss": 0.0858, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 12.511627906976745, |
|
"grad_norm": 3.0373013019561768, |
|
"learning_rate": 4.694767441860465e-05, |
|
"loss": 0.0256, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 12.534883720930232, |
|
"grad_norm": 5.356001853942871, |
|
"learning_rate": 4.680232558139535e-05, |
|
"loss": 0.1057, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 12.55813953488372, |
|
"grad_norm": 0.7480242252349854, |
|
"learning_rate": 4.665697674418605e-05, |
|
"loss": 0.0102, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 12.55813953488372, |
|
"eval_accuracy": 0.898360655737705, |
|
"eval_f1": 0.8983213068212369, |
|
"eval_loss": 0.27956801652908325, |
|
"eval_precision": 0.8984348125214998, |
|
"eval_recall": 0.8991556819160851, |
|
"eval_runtime": 0.0733, |
|
"eval_samples_per_second": 4158.554, |
|
"eval_steps_per_second": 68.173, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 12.581395348837209, |
|
"grad_norm": 8.634737014770508, |
|
"learning_rate": 4.651162790697675e-05, |
|
"loss": 0.1041, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 12.604651162790697, |
|
"grad_norm": 2.154197931289673, |
|
"learning_rate": 4.636627906976744e-05, |
|
"loss": 0.0883, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 12.627906976744185, |
|
"grad_norm": 5.6526265144348145, |
|
"learning_rate": 4.622093023255814e-05, |
|
"loss": 0.0634, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 12.651162790697674, |
|
"grad_norm": 0.4163094162940979, |
|
"learning_rate": 4.607558139534884e-05, |
|
"loss": 0.0102, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 12.674418604651162, |
|
"grad_norm": 2.6310293674468994, |
|
"learning_rate": 4.593023255813954e-05, |
|
"loss": 0.0613, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 12.69767441860465, |
|
"grad_norm": 0.8978974223136902, |
|
"learning_rate": 4.5784883720930236e-05, |
|
"loss": 0.0118, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 12.720930232558139, |
|
"grad_norm": 3.0897459983825684, |
|
"learning_rate": 4.563953488372093e-05, |
|
"loss": 0.0592, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 12.744186046511627, |
|
"grad_norm": 1.5553648471832275, |
|
"learning_rate": 4.549418604651163e-05, |
|
"loss": 0.0357, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 12.767441860465116, |
|
"grad_norm": 1.9957389831542969, |
|
"learning_rate": 4.5348837209302326e-05, |
|
"loss": 0.0415, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 12.790697674418604, |
|
"grad_norm": 0.22972577810287476, |
|
"learning_rate": 4.520348837209303e-05, |
|
"loss": 0.0078, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 12.813953488372093, |
|
"grad_norm": 2.78206205368042, |
|
"learning_rate": 4.505813953488372e-05, |
|
"loss": 0.026, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 12.837209302325581, |
|
"grad_norm": 3.2643375396728516, |
|
"learning_rate": 4.491279069767442e-05, |
|
"loss": 0.0386, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 12.86046511627907, |
|
"grad_norm": 9.664105415344238, |
|
"learning_rate": 4.476744186046512e-05, |
|
"loss": 0.1431, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 12.883720930232558, |
|
"grad_norm": 3.2015540599823, |
|
"learning_rate": 4.462209302325582e-05, |
|
"loss": 0.0605, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 12.906976744186046, |
|
"grad_norm": 4.17910099029541, |
|
"learning_rate": 4.447674418604651e-05, |
|
"loss": 0.0797, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 12.930232558139535, |
|
"grad_norm": 3.999985933303833, |
|
"learning_rate": 4.433139534883721e-05, |
|
"loss": 0.117, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 12.953488372093023, |
|
"grad_norm": 1.1732484102249146, |
|
"learning_rate": 4.418604651162791e-05, |
|
"loss": 0.0244, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 12.976744186046512, |
|
"grad_norm": 4.184539794921875, |
|
"learning_rate": 4.404069767441861e-05, |
|
"loss": 0.0517, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"grad_norm": 2.3533170223236084, |
|
"learning_rate": 4.389534883720931e-05, |
|
"loss": 0.0505, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 13.023255813953488, |
|
"grad_norm": 2.3821420669555664, |
|
"learning_rate": 4.375e-05, |
|
"loss": 0.0253, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 13.023255813953488, |
|
"eval_accuracy": 0.8885245901639345, |
|
"eval_f1": 0.8879936058066189, |
|
"eval_loss": 0.3079434633255005, |
|
"eval_precision": 0.8900242992276317, |
|
"eval_recall": 0.8872016886361678, |
|
"eval_runtime": 0.0743, |
|
"eval_samples_per_second": 4102.318, |
|
"eval_steps_per_second": 67.251, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 13.046511627906977, |
|
"grad_norm": 3.6151607036590576, |
|
"learning_rate": 4.36046511627907e-05, |
|
"loss": 0.08, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 13.069767441860465, |
|
"grad_norm": 1.870180368423462, |
|
"learning_rate": 4.34593023255814e-05, |
|
"loss": 0.0475, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 13.093023255813954, |
|
"grad_norm": 3.9536831378936768, |
|
"learning_rate": 4.3313953488372096e-05, |
|
"loss": 0.0206, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 13.116279069767442, |
|
"grad_norm": 6.368700981140137, |
|
"learning_rate": 4.3168604651162795e-05, |
|
"loss": 0.0485, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 13.13953488372093, |
|
"grad_norm": 1.1507411003112793, |
|
"learning_rate": 4.302325581395349e-05, |
|
"loss": 0.0133, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 13.162790697674419, |
|
"grad_norm": 1.034654140472412, |
|
"learning_rate": 4.2877906976744186e-05, |
|
"loss": 0.0437, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 13.186046511627907, |
|
"grad_norm": 0.2083161622285843, |
|
"learning_rate": 4.2732558139534885e-05, |
|
"loss": 0.0054, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 13.209302325581396, |
|
"grad_norm": 1.1866053342819214, |
|
"learning_rate": 4.2587209302325584e-05, |
|
"loss": 0.0134, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 13.232558139534884, |
|
"grad_norm": 0.6963038444519043, |
|
"learning_rate": 4.2441860465116276e-05, |
|
"loss": 0.0083, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 13.255813953488373, |
|
"grad_norm": 3.132704973220825, |
|
"learning_rate": 4.2296511627906975e-05, |
|
"loss": 0.0567, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 13.279069767441861, |
|
"grad_norm": 1.62773859500885, |
|
"learning_rate": 4.215116279069768e-05, |
|
"loss": 0.0803, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 13.30232558139535, |
|
"grad_norm": 0.7541981935501099, |
|
"learning_rate": 4.200581395348838e-05, |
|
"loss": 0.0114, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 13.325581395348838, |
|
"grad_norm": 5.177672386169434, |
|
"learning_rate": 4.186046511627907e-05, |
|
"loss": 0.0302, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 13.348837209302326, |
|
"grad_norm": 3.653960704803467, |
|
"learning_rate": 4.171511627906977e-05, |
|
"loss": 0.0648, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 13.372093023255815, |
|
"grad_norm": 0.6348634362220764, |
|
"learning_rate": 4.156976744186047e-05, |
|
"loss": 0.0077, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 13.395348837209303, |
|
"grad_norm": 4.980737686157227, |
|
"learning_rate": 4.142441860465117e-05, |
|
"loss": 0.0857, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 13.418604651162791, |
|
"grad_norm": 4.5483551025390625, |
|
"learning_rate": 4.127906976744187e-05, |
|
"loss": 0.0456, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 13.44186046511628, |
|
"grad_norm": 4.054844379425049, |
|
"learning_rate": 4.113372093023256e-05, |
|
"loss": 0.0837, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 13.465116279069768, |
|
"grad_norm": 3.3397719860076904, |
|
"learning_rate": 4.098837209302326e-05, |
|
"loss": 0.0742, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 13.488372093023255, |
|
"grad_norm": 6.787147045135498, |
|
"learning_rate": 4.0843023255813957e-05, |
|
"loss": 0.0868, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 13.488372093023255, |
|
"eval_accuracy": 0.8918032786885246, |
|
"eval_f1": 0.8914251194701244, |
|
"eval_loss": 0.34987983107566833, |
|
"eval_precision": 0.8924277806607853, |
|
"eval_recall": 0.8909063496166107, |
|
"eval_runtime": 0.0747, |
|
"eval_samples_per_second": 4085.209, |
|
"eval_steps_per_second": 66.971, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 13.511627906976745, |
|
"grad_norm": 1.7752641439437866, |
|
"learning_rate": 4.0697674418604655e-05, |
|
"loss": 0.0476, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 13.534883720930232, |
|
"grad_norm": 2.80531907081604, |
|
"learning_rate": 4.055232558139535e-05, |
|
"loss": 0.0202, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 13.55813953488372, |
|
"grad_norm": 0.5729110836982727, |
|
"learning_rate": 4.0406976744186046e-05, |
|
"loss": 0.0078, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 13.581395348837209, |
|
"grad_norm": 5.221317768096924, |
|
"learning_rate": 4.0261627906976745e-05, |
|
"loss": 0.092, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 13.604651162790697, |
|
"grad_norm": 3.757556915283203, |
|
"learning_rate": 4.0116279069767444e-05, |
|
"loss": 0.024, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 13.627906976744185, |
|
"grad_norm": 6.441154479980469, |
|
"learning_rate": 3.997093023255814e-05, |
|
"loss": 0.1422, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 13.651162790697674, |
|
"grad_norm": 2.314418077468872, |
|
"learning_rate": 3.9825581395348835e-05, |
|
"loss": 0.0815, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 13.674418604651162, |
|
"grad_norm": 2.129340887069702, |
|
"learning_rate": 3.9680232558139534e-05, |
|
"loss": 0.0419, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 13.69767441860465, |
|
"grad_norm": 1.477169394493103, |
|
"learning_rate": 3.953488372093023e-05, |
|
"loss": 0.0379, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 13.720930232558139, |
|
"grad_norm": 2.567399740219116, |
|
"learning_rate": 3.938953488372093e-05, |
|
"loss": 0.1169, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 13.744186046511627, |
|
"grad_norm": 1.2707808017730713, |
|
"learning_rate": 3.924418604651163e-05, |
|
"loss": 0.0225, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 13.767441860465116, |
|
"grad_norm": 0.3642787039279938, |
|
"learning_rate": 3.909883720930232e-05, |
|
"loss": 0.0053, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 13.790697674418604, |
|
"grad_norm": 2.2786664962768555, |
|
"learning_rate": 3.895348837209303e-05, |
|
"loss": 0.043, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 13.813953488372093, |
|
"grad_norm": 6.119050979614258, |
|
"learning_rate": 3.880813953488373e-05, |
|
"loss": 0.1129, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 13.837209302325581, |
|
"grad_norm": 3.127246141433716, |
|
"learning_rate": 3.866279069767442e-05, |
|
"loss": 0.1094, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 13.86046511627907, |
|
"grad_norm": 4.50978946685791, |
|
"learning_rate": 3.851744186046512e-05, |
|
"loss": 0.0714, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 13.883720930232558, |
|
"grad_norm": 2.3526670932769775, |
|
"learning_rate": 3.837209302325582e-05, |
|
"loss": 0.1126, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 13.906976744186046, |
|
"grad_norm": 4.066991806030273, |
|
"learning_rate": 3.8226744186046516e-05, |
|
"loss": 0.0466, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 13.930232558139535, |
|
"grad_norm": 1.5120619535446167, |
|
"learning_rate": 3.8081395348837215e-05, |
|
"loss": 0.0609, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 13.953488372093023, |
|
"grad_norm": 3.2349016666412354, |
|
"learning_rate": 3.793604651162791e-05, |
|
"loss": 0.0399, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 13.953488372093023, |
|
"eval_accuracy": 0.8885245901639345, |
|
"eval_f1": 0.8880903047569713, |
|
"eval_loss": 0.36928993463516235, |
|
"eval_precision": 0.8893939393939394, |
|
"eval_recall": 0.887481692082364, |
|
"eval_runtime": 0.0753, |
|
"eval_samples_per_second": 4048.646, |
|
"eval_steps_per_second": 66.371, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 13.976744186046512, |
|
"grad_norm": 2.7635951042175293, |
|
"learning_rate": 3.7790697674418606e-05, |
|
"loss": 0.1149, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"grad_norm": 0.716441810131073, |
|
"learning_rate": 3.7645348837209305e-05, |
|
"loss": 0.0095, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 14.023255813953488, |
|
"grad_norm": 1.099993348121643, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.0179, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 14.046511627906977, |
|
"grad_norm": 0.9968247413635254, |
|
"learning_rate": 3.73546511627907e-05, |
|
"loss": 0.0295, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 14.069767441860465, |
|
"grad_norm": 0.9443352222442627, |
|
"learning_rate": 3.7209302325581394e-05, |
|
"loss": 0.0121, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 14.093023255813954, |
|
"grad_norm": 0.24217644333839417, |
|
"learning_rate": 3.706395348837209e-05, |
|
"loss": 0.0066, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 14.116279069767442, |
|
"grad_norm": 0.32041826844215393, |
|
"learning_rate": 3.691860465116279e-05, |
|
"loss": 0.0071, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 14.13953488372093, |
|
"grad_norm": 4.715015411376953, |
|
"learning_rate": 3.677325581395349e-05, |
|
"loss": 0.0775, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 14.162790697674419, |
|
"grad_norm": 1.9597333669662476, |
|
"learning_rate": 3.662790697674418e-05, |
|
"loss": 0.0232, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 14.186046511627907, |
|
"grad_norm": 1.3680325746536255, |
|
"learning_rate": 3.648255813953488e-05, |
|
"loss": 0.0194, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 14.209302325581396, |
|
"grad_norm": 7.5977396965026855, |
|
"learning_rate": 3.633720930232558e-05, |
|
"loss": 0.1274, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 14.232558139534884, |
|
"grad_norm": 7.637692451477051, |
|
"learning_rate": 3.619186046511628e-05, |
|
"loss": 0.1341, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 14.255813953488373, |
|
"grad_norm": 10.332743644714355, |
|
"learning_rate": 3.604651162790698e-05, |
|
"loss": 0.1039, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 14.279069767441861, |
|
"grad_norm": 1.6394683122634888, |
|
"learning_rate": 3.590116279069768e-05, |
|
"loss": 0.0126, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 14.30232558139535, |
|
"grad_norm": 1.69204580783844, |
|
"learning_rate": 3.5755813953488376e-05, |
|
"loss": 0.0327, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 14.325581395348838, |
|
"grad_norm": 3.1536524295806885, |
|
"learning_rate": 3.5610465116279075e-05, |
|
"loss": 0.0347, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 14.348837209302326, |
|
"grad_norm": 0.6318650841712952, |
|
"learning_rate": 3.5465116279069774e-05, |
|
"loss": 0.0424, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 14.372093023255815, |
|
"grad_norm": 5.714948654174805, |
|
"learning_rate": 3.5319767441860466e-05, |
|
"loss": 0.0339, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 14.395348837209303, |
|
"grad_norm": 4.141314506530762, |
|
"learning_rate": 3.5174418604651165e-05, |
|
"loss": 0.045, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 14.418604651162791, |
|
"grad_norm": 1.1429768800735474, |
|
"learning_rate": 3.5029069767441864e-05, |
|
"loss": 0.0291, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 14.418604651162791, |
|
"eval_accuracy": 0.8950819672131147, |
|
"eval_f1": 0.8944819235426397, |
|
"eval_loss": 0.3720164895057678, |
|
"eval_precision": 0.8974286460146188, |
|
"eval_recall": 0.8934909968122684, |
|
"eval_runtime": 0.0902, |
|
"eval_samples_per_second": 3381.6, |
|
"eval_steps_per_second": 55.436, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 14.44186046511628, |
|
"grad_norm": 11.266057014465332, |
|
"learning_rate": 3.488372093023256e-05, |
|
"loss": 0.0526, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 14.465116279069768, |
|
"grad_norm": 0.8715966939926147, |
|
"learning_rate": 3.4738372093023255e-05, |
|
"loss": 0.0256, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 14.488372093023255, |
|
"grad_norm": 3.1204757690429688, |
|
"learning_rate": 3.4593023255813954e-05, |
|
"loss": 0.0256, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 14.511627906976745, |
|
"grad_norm": 2.438506841659546, |
|
"learning_rate": 3.444767441860465e-05, |
|
"loss": 0.0321, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 14.534883720930232, |
|
"grad_norm": 3.6766304969787598, |
|
"learning_rate": 3.430232558139535e-05, |
|
"loss": 0.1027, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 14.55813953488372, |
|
"grad_norm": 6.79380464553833, |
|
"learning_rate": 3.415697674418605e-05, |
|
"loss": 0.0597, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 14.581395348837209, |
|
"grad_norm": 1.6378151178359985, |
|
"learning_rate": 3.401162790697674e-05, |
|
"loss": 0.0238, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 14.604651162790697, |
|
"grad_norm": 0.5451012253761292, |
|
"learning_rate": 3.386627906976744e-05, |
|
"loss": 0.0103, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 14.627906976744185, |
|
"grad_norm": 2.0606513023376465, |
|
"learning_rate": 3.372093023255814e-05, |
|
"loss": 0.0223, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 14.651162790697674, |
|
"grad_norm": 0.4602338373661041, |
|
"learning_rate": 3.357558139534884e-05, |
|
"loss": 0.0057, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 14.674418604651162, |
|
"grad_norm": 4.629768371582031, |
|
"learning_rate": 3.343023255813954e-05, |
|
"loss": 0.1183, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 14.69767441860465, |
|
"grad_norm": 6.286869525909424, |
|
"learning_rate": 3.328488372093023e-05, |
|
"loss": 0.0308, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 14.720930232558139, |
|
"grad_norm": 1.118071436882019, |
|
"learning_rate": 3.313953488372093e-05, |
|
"loss": 0.0692, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 14.744186046511627, |
|
"grad_norm": 2.621478319168091, |
|
"learning_rate": 3.299418604651163e-05, |
|
"loss": 0.0206, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 14.767441860465116, |
|
"grad_norm": 2.408926486968994, |
|
"learning_rate": 3.284883720930233e-05, |
|
"loss": 0.0868, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 14.790697674418604, |
|
"grad_norm": 4.2226104736328125, |
|
"learning_rate": 3.2703488372093026e-05, |
|
"loss": 0.0165, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 14.813953488372093, |
|
"grad_norm": 8.624298095703125, |
|
"learning_rate": 3.2558139534883724e-05, |
|
"loss": 0.1733, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 14.837209302325581, |
|
"grad_norm": 5.992215633392334, |
|
"learning_rate": 3.241279069767442e-05, |
|
"loss": 0.0286, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 14.86046511627907, |
|
"grad_norm": 0.8743571043014526, |
|
"learning_rate": 3.226744186046512e-05, |
|
"loss": 0.0113, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 14.883720930232558, |
|
"grad_norm": 0.9226441979408264, |
|
"learning_rate": 3.2122093023255814e-05, |
|
"loss": 0.0069, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 14.883720930232558, |
|
"eval_accuracy": 0.8918032786885246, |
|
"eval_f1": 0.8915047052355851, |
|
"eval_loss": 0.3578951358795166, |
|
"eval_precision": 0.8919968919968919, |
|
"eval_recall": 0.8911863530628069, |
|
"eval_runtime": 0.0756, |
|
"eval_samples_per_second": 4035.886, |
|
"eval_steps_per_second": 66.162, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 14.906976744186046, |
|
"grad_norm": 4.145755767822266, |
|
"learning_rate": 3.197674418604651e-05, |
|
"loss": 0.0439, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 14.930232558139535, |
|
"grad_norm": 1.2993311882019043, |
|
"learning_rate": 3.183139534883721e-05, |
|
"loss": 0.0446, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 14.953488372093023, |
|
"grad_norm": 0.2827296555042267, |
|
"learning_rate": 3.168604651162791e-05, |
|
"loss": 0.0052, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 14.976744186046512, |
|
"grad_norm": 2.9335291385650635, |
|
"learning_rate": 3.154069767441861e-05, |
|
"loss": 0.0644, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"grad_norm": 7.6974968910217285, |
|
"learning_rate": 3.13953488372093e-05, |
|
"loss": 0.0456, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 15.023255813953488, |
|
"grad_norm": 2.137190103530884, |
|
"learning_rate": 3.125e-05, |
|
"loss": 0.0147, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 15.046511627906977, |
|
"grad_norm": 0.834999680519104, |
|
"learning_rate": 3.11046511627907e-05, |
|
"loss": 0.0181, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 15.069767441860465, |
|
"grad_norm": 0.8868411183357239, |
|
"learning_rate": 3.09593023255814e-05, |
|
"loss": 0.0467, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 15.093023255813954, |
|
"grad_norm": 3.1377875804901123, |
|
"learning_rate": 3.081395348837209e-05, |
|
"loss": 0.0281, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 15.116279069767442, |
|
"grad_norm": 3.3987367153167725, |
|
"learning_rate": 3.066860465116279e-05, |
|
"loss": 0.1133, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 15.13953488372093, |
|
"grad_norm": 0.19906505942344666, |
|
"learning_rate": 3.052325581395349e-05, |
|
"loss": 0.0032, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 15.162790697674419, |
|
"grad_norm": 4.011625289916992, |
|
"learning_rate": 3.0377906976744187e-05, |
|
"loss": 0.0792, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 15.186046511627907, |
|
"grad_norm": 9.421031951904297, |
|
"learning_rate": 3.0232558139534883e-05, |
|
"loss": 0.0612, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 15.209302325581396, |
|
"grad_norm": 3.5618064403533936, |
|
"learning_rate": 3.008720930232558e-05, |
|
"loss": 0.0159, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 15.232558139534884, |
|
"grad_norm": 7.569765090942383, |
|
"learning_rate": 2.9941860465116277e-05, |
|
"loss": 0.1039, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 15.255813953488373, |
|
"grad_norm": 6.453846454620361, |
|
"learning_rate": 2.9796511627906976e-05, |
|
"loss": 0.0283, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 15.279069767441861, |
|
"grad_norm": 0.1914948970079422, |
|
"learning_rate": 2.9651162790697678e-05, |
|
"loss": 0.0033, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 15.30232558139535, |
|
"grad_norm": 2.6470947265625, |
|
"learning_rate": 2.9505813953488377e-05, |
|
"loss": 0.0505, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 15.325581395348838, |
|
"grad_norm": 5.789623737335205, |
|
"learning_rate": 2.9360465116279072e-05, |
|
"loss": 0.0419, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 15.348837209302326, |
|
"grad_norm": 1.2438925504684448, |
|
"learning_rate": 2.921511627906977e-05, |
|
"loss": 0.027, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 15.348837209302326, |
|
"eval_accuracy": 0.898360655737705, |
|
"eval_f1": 0.8980801776455497, |
|
"eval_loss": 0.36847957968711853, |
|
"eval_precision": 0.8985798152464819, |
|
"eval_recall": 0.8977556646851038, |
|
"eval_runtime": 0.0739, |
|
"eval_samples_per_second": 4125.669, |
|
"eval_steps_per_second": 67.634, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 15.372093023255815, |
|
"grad_norm": 2.8159120082855225, |
|
"learning_rate": 2.9069767441860467e-05, |
|
"loss": 0.0665, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 15.395348837209303, |
|
"grad_norm": 3.529680013656616, |
|
"learning_rate": 2.8924418604651166e-05, |
|
"loss": 0.0238, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 15.418604651162791, |
|
"grad_norm": 0.9973799586296082, |
|
"learning_rate": 2.8779069767441864e-05, |
|
"loss": 0.0051, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 15.44186046511628, |
|
"grad_norm": 2.763955593109131, |
|
"learning_rate": 2.863372093023256e-05, |
|
"loss": 0.0107, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 15.465116279069768, |
|
"grad_norm": 3.6373209953308105, |
|
"learning_rate": 2.848837209302326e-05, |
|
"loss": 0.0466, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 15.488372093023255, |
|
"grad_norm": 3.262023448944092, |
|
"learning_rate": 2.8343023255813954e-05, |
|
"loss": 0.025, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 15.511627906976745, |
|
"grad_norm": 0.7062492370605469, |
|
"learning_rate": 2.8197674418604653e-05, |
|
"loss": 0.0067, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 15.534883720930232, |
|
"grad_norm": 2.9254820346832275, |
|
"learning_rate": 2.805232558139535e-05, |
|
"loss": 0.0394, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 15.55813953488372, |
|
"grad_norm": 1.5290218591690063, |
|
"learning_rate": 2.7906976744186048e-05, |
|
"loss": 0.0268, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 15.581395348837209, |
|
"grad_norm": 3.9581668376922607, |
|
"learning_rate": 2.7761627906976746e-05, |
|
"loss": 0.0683, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 15.604651162790697, |
|
"grad_norm": 3.121964454650879, |
|
"learning_rate": 2.7616279069767442e-05, |
|
"loss": 0.0307, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 15.627906976744185, |
|
"grad_norm": 4.223913669586182, |
|
"learning_rate": 2.747093023255814e-05, |
|
"loss": 0.0778, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 15.651162790697674, |
|
"grad_norm": 1.8996158838272095, |
|
"learning_rate": 2.7325581395348836e-05, |
|
"loss": 0.0149, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 15.674418604651162, |
|
"grad_norm": 2.5367538928985596, |
|
"learning_rate": 2.7180232558139535e-05, |
|
"loss": 0.0238, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 15.69767441860465, |
|
"grad_norm": 1.5632944107055664, |
|
"learning_rate": 2.703488372093023e-05, |
|
"loss": 0.0159, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 15.720930232558139, |
|
"grad_norm": 2.031212329864502, |
|
"learning_rate": 2.688953488372093e-05, |
|
"loss": 0.0265, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 15.744186046511627, |
|
"grad_norm": 1.0653328895568848, |
|
"learning_rate": 2.674418604651163e-05, |
|
"loss": 0.0146, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 15.767441860465116, |
|
"grad_norm": 2.078573226928711, |
|
"learning_rate": 2.6598837209302324e-05, |
|
"loss": 0.0314, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 15.790697674418604, |
|
"grad_norm": 1.004683017730713, |
|
"learning_rate": 2.6453488372093026e-05, |
|
"loss": 0.0079, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 15.813953488372093, |
|
"grad_norm": 0.6423361897468567, |
|
"learning_rate": 2.6308139534883725e-05, |
|
"loss": 0.0265, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 15.813953488372093, |
|
"eval_accuracy": 0.898360655737705, |
|
"eval_f1": 0.8981460534962133, |
|
"eval_loss": 0.3592439889907837, |
|
"eval_precision": 0.8982758620689655, |
|
"eval_recall": 0.8980356681313001, |
|
"eval_runtime": 0.0743, |
|
"eval_samples_per_second": 4106.888, |
|
"eval_steps_per_second": 67.326, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 15.837209302325581, |
|
"grad_norm": 2.9641597270965576, |
|
"learning_rate": 2.616279069767442e-05, |
|
"loss": 0.0753, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 15.86046511627907, |
|
"grad_norm": 5.338639259338379, |
|
"learning_rate": 2.601744186046512e-05, |
|
"loss": 0.0325, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 15.883720930232558, |
|
"grad_norm": 1.9736541509628296, |
|
"learning_rate": 2.5872093023255818e-05, |
|
"loss": 0.0236, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 15.906976744186046, |
|
"grad_norm": 0.5987362265586853, |
|
"learning_rate": 2.5726744186046514e-05, |
|
"loss": 0.0056, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 15.930232558139535, |
|
"grad_norm": 1.9034494161605835, |
|
"learning_rate": 2.5581395348837212e-05, |
|
"loss": 0.095, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 15.953488372093023, |
|
"grad_norm": 0.7597903609275818, |
|
"learning_rate": 2.5436046511627908e-05, |
|
"loss": 0.0111, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 15.976744186046512, |
|
"grad_norm": 2.3135693073272705, |
|
"learning_rate": 2.5290697674418607e-05, |
|
"loss": 0.0792, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"grad_norm": 0.07970131933689117, |
|
"learning_rate": 2.5145348837209302e-05, |
|
"loss": 0.0024, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 16.023255813953487, |
|
"grad_norm": 1.8181146383285522, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.0181, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 16.046511627906977, |
|
"grad_norm": 4.323759078979492, |
|
"learning_rate": 2.48546511627907e-05, |
|
"loss": 0.0386, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 16.069767441860463, |
|
"grad_norm": 3.518137216567993, |
|
"learning_rate": 2.4709302325581396e-05, |
|
"loss": 0.0794, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 16.093023255813954, |
|
"grad_norm": 0.384950190782547, |
|
"learning_rate": 2.4563953488372094e-05, |
|
"loss": 0.0042, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 16.11627906976744, |
|
"grad_norm": 0.4682580232620239, |
|
"learning_rate": 2.441860465116279e-05, |
|
"loss": 0.0078, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 16.13953488372093, |
|
"grad_norm": 1.6920162439346313, |
|
"learning_rate": 2.427325581395349e-05, |
|
"loss": 0.0181, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 16.162790697674417, |
|
"grad_norm": 3.120163679122925, |
|
"learning_rate": 2.4127906976744188e-05, |
|
"loss": 0.0414, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 16.186046511627907, |
|
"grad_norm": 0.8569607734680176, |
|
"learning_rate": 2.3982558139534887e-05, |
|
"loss": 0.0063, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 16.209302325581394, |
|
"grad_norm": 1.5792855024337769, |
|
"learning_rate": 2.3837209302325582e-05, |
|
"loss": 0.0233, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 16.232558139534884, |
|
"grad_norm": 11.798049926757812, |
|
"learning_rate": 2.369186046511628e-05, |
|
"loss": 0.031, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 16.25581395348837, |
|
"grad_norm": 2.0980348587036133, |
|
"learning_rate": 2.354651162790698e-05, |
|
"loss": 0.0345, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 16.27906976744186, |
|
"grad_norm": 1.9684696197509766, |
|
"learning_rate": 2.3401162790697675e-05, |
|
"loss": 0.0109, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 16.27906976744186, |
|
"eval_accuracy": 0.898360655737705, |
|
"eval_f1": 0.8979217273954117, |
|
"eval_loss": 0.36475130915641785, |
|
"eval_precision": 0.8996056167114501, |
|
"eval_recall": 0.8971956577927114, |
|
"eval_runtime": 0.0743, |
|
"eval_samples_per_second": 4105.148, |
|
"eval_steps_per_second": 67.298, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 16.302325581395348, |
|
"grad_norm": 1.2527034282684326, |
|
"learning_rate": 2.3255813953488374e-05, |
|
"loss": 0.0088, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 16.325581395348838, |
|
"grad_norm": 4.517597198486328, |
|
"learning_rate": 2.311046511627907e-05, |
|
"loss": 0.05, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 16.348837209302324, |
|
"grad_norm": 2.9931998252868652, |
|
"learning_rate": 2.296511627906977e-05, |
|
"loss": 0.0852, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 16.372093023255815, |
|
"grad_norm": 1.5361837148666382, |
|
"learning_rate": 2.2819767441860464e-05, |
|
"loss": 0.0205, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 16.3953488372093, |
|
"grad_norm": 12.71114730834961, |
|
"learning_rate": 2.2674418604651163e-05, |
|
"loss": 0.0643, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 16.41860465116279, |
|
"grad_norm": 0.1732826828956604, |
|
"learning_rate": 2.252906976744186e-05, |
|
"loss": 0.0028, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 16.441860465116278, |
|
"grad_norm": 5.568197250366211, |
|
"learning_rate": 2.238372093023256e-05, |
|
"loss": 0.0147, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 16.46511627906977, |
|
"grad_norm": 3.462963581085205, |
|
"learning_rate": 2.2238372093023256e-05, |
|
"loss": 0.0639, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 16.488372093023255, |
|
"grad_norm": 0.7707027792930603, |
|
"learning_rate": 2.2093023255813955e-05, |
|
"loss": 0.0052, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 16.511627906976745, |
|
"grad_norm": 2.6119136810302734, |
|
"learning_rate": 2.1947674418604654e-05, |
|
"loss": 0.0679, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 16.53488372093023, |
|
"grad_norm": 4.328090190887451, |
|
"learning_rate": 2.180232558139535e-05, |
|
"loss": 0.0336, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 16.558139534883722, |
|
"grad_norm": 3.516861915588379, |
|
"learning_rate": 2.1656976744186048e-05, |
|
"loss": 0.1004, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 16.58139534883721, |
|
"grad_norm": 0.8900988698005676, |
|
"learning_rate": 2.1511627906976744e-05, |
|
"loss": 0.0083, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 16.6046511627907, |
|
"grad_norm": 4.924454689025879, |
|
"learning_rate": 2.1366279069767442e-05, |
|
"loss": 0.1108, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 16.627906976744185, |
|
"grad_norm": 4.6815619468688965, |
|
"learning_rate": 2.1220930232558138e-05, |
|
"loss": 0.0611, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 16.651162790697676, |
|
"grad_norm": 7.625583171844482, |
|
"learning_rate": 2.107558139534884e-05, |
|
"loss": 0.0698, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 16.674418604651162, |
|
"grad_norm": 2.8963229656219482, |
|
"learning_rate": 2.0930232558139536e-05, |
|
"loss": 0.0226, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 16.697674418604652, |
|
"grad_norm": 1.349528193473816, |
|
"learning_rate": 2.0784883720930235e-05, |
|
"loss": 0.0074, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 16.72093023255814, |
|
"grad_norm": 0.9660943746566772, |
|
"learning_rate": 2.0639534883720933e-05, |
|
"loss": 0.0042, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 16.74418604651163, |
|
"grad_norm": 0.205625519156456, |
|
"learning_rate": 2.049418604651163e-05, |
|
"loss": 0.0036, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 16.74418604651163, |
|
"eval_accuracy": 0.9081967213114754, |
|
"eval_f1": 0.9080296372878436, |
|
"eval_loss": 0.3279436528682709, |
|
"eval_precision": 0.9080296372878436, |
|
"eval_recall": 0.9080296372878436, |
|
"eval_runtime": 0.0765, |
|
"eval_samples_per_second": 3988.597, |
|
"eval_steps_per_second": 65.387, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 16.767441860465116, |
|
"grad_norm": 9.84315299987793, |
|
"learning_rate": 2.0348837209302328e-05, |
|
"loss": 0.1307, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 16.790697674418606, |
|
"grad_norm": 11.633748054504395, |
|
"learning_rate": 2.0203488372093023e-05, |
|
"loss": 0.065, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 16.813953488372093, |
|
"grad_norm": 2.6331350803375244, |
|
"learning_rate": 2.0058139534883722e-05, |
|
"loss": 0.0336, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 16.837209302325583, |
|
"grad_norm": 6.747828006744385, |
|
"learning_rate": 1.9912790697674418e-05, |
|
"loss": 0.0478, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 16.86046511627907, |
|
"grad_norm": 1.7129838466644287, |
|
"learning_rate": 1.9767441860465116e-05, |
|
"loss": 0.0403, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 16.88372093023256, |
|
"grad_norm": 1.0863773822784424, |
|
"learning_rate": 1.9622093023255815e-05, |
|
"loss": 0.0105, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 16.906976744186046, |
|
"grad_norm": 1.2210698127746582, |
|
"learning_rate": 1.9476744186046514e-05, |
|
"loss": 0.0114, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 16.930232558139537, |
|
"grad_norm": 3.351280689239502, |
|
"learning_rate": 1.933139534883721e-05, |
|
"loss": 0.0353, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 16.953488372093023, |
|
"grad_norm": 1.9887293577194214, |
|
"learning_rate": 1.918604651162791e-05, |
|
"loss": 0.0208, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 16.97674418604651, |
|
"grad_norm": 0.7091693878173828, |
|
"learning_rate": 1.9040697674418607e-05, |
|
"loss": 0.0074, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"grad_norm": 0.6858041882514954, |
|
"learning_rate": 1.8895348837209303e-05, |
|
"loss": 0.0051, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 17.023255813953487, |
|
"grad_norm": 4.898313045501709, |
|
"learning_rate": 1.8750000000000002e-05, |
|
"loss": 0.1197, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 17.046511627906977, |
|
"grad_norm": 2.349663496017456, |
|
"learning_rate": 1.8604651162790697e-05, |
|
"loss": 0.0244, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 17.069767441860463, |
|
"grad_norm": 2.8203916549682617, |
|
"learning_rate": 1.8459302325581396e-05, |
|
"loss": 0.0801, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 17.093023255813954, |
|
"grad_norm": 4.241973400115967, |
|
"learning_rate": 1.831395348837209e-05, |
|
"loss": 0.0303, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 17.11627906976744, |
|
"grad_norm": 7.765374660491943, |
|
"learning_rate": 1.816860465116279e-05, |
|
"loss": 0.0758, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 17.13953488372093, |
|
"grad_norm": 7.214727401733398, |
|
"learning_rate": 1.802325581395349e-05, |
|
"loss": 0.0461, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 17.162790697674417, |
|
"grad_norm": 7.209427833557129, |
|
"learning_rate": 1.7877906976744188e-05, |
|
"loss": 0.0354, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 17.186046511627907, |
|
"grad_norm": 4.5369744300842285, |
|
"learning_rate": 1.7732558139534887e-05, |
|
"loss": 0.0223, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 17.209302325581394, |
|
"grad_norm": 0.8520297408103943, |
|
"learning_rate": 1.7587209302325583e-05, |
|
"loss": 0.0115, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 17.209302325581394, |
|
"eval_accuracy": 0.8950819672131147, |
|
"eval_f1": 0.8945822172297591, |
|
"eval_loss": 0.35655906796455383, |
|
"eval_precision": 0.8966414996094767, |
|
"eval_recall": 0.8937710002584647, |
|
"eval_runtime": 0.0741, |
|
"eval_samples_per_second": 4118.417, |
|
"eval_steps_per_second": 67.515, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 17.232558139534884, |
|
"grad_norm": 2.845405101776123, |
|
"learning_rate": 1.744186046511628e-05, |
|
"loss": 0.0416, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 17.25581395348837, |
|
"grad_norm": 12.433396339416504, |
|
"learning_rate": 1.7296511627906977e-05, |
|
"loss": 0.0335, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 17.27906976744186, |
|
"grad_norm": 0.4390711486339569, |
|
"learning_rate": 1.7151162790697676e-05, |
|
"loss": 0.0062, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 17.302325581395348, |
|
"grad_norm": 2.333735942840576, |
|
"learning_rate": 1.700581395348837e-05, |
|
"loss": 0.0303, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 17.325581395348838, |
|
"grad_norm": 0.1029660701751709, |
|
"learning_rate": 1.686046511627907e-05, |
|
"loss": 0.0023, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 17.348837209302324, |
|
"grad_norm": 4.543797016143799, |
|
"learning_rate": 1.671511627906977e-05, |
|
"loss": 0.0634, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 17.372093023255815, |
|
"grad_norm": 4.657381534576416, |
|
"learning_rate": 1.6569767441860464e-05, |
|
"loss": 0.0199, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 17.3953488372093, |
|
"grad_norm": 6.3823018074035645, |
|
"learning_rate": 1.6424418604651163e-05, |
|
"loss": 0.1186, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 17.41860465116279, |
|
"grad_norm": 1.975292682647705, |
|
"learning_rate": 1.6279069767441862e-05, |
|
"loss": 0.0948, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 17.441860465116278, |
|
"grad_norm": 4.323884010314941, |
|
"learning_rate": 1.613372093023256e-05, |
|
"loss": 0.1327, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 17.46511627906977, |
|
"grad_norm": 1.2401987314224243, |
|
"learning_rate": 1.5988372093023257e-05, |
|
"loss": 0.0251, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 17.488372093023255, |
|
"grad_norm": 0.8695264458656311, |
|
"learning_rate": 1.5843023255813955e-05, |
|
"loss": 0.005, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 17.511627906976745, |
|
"grad_norm": 2.0256264209747314, |
|
"learning_rate": 1.569767441860465e-05, |
|
"loss": 0.0133, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 17.53488372093023, |
|
"grad_norm": 0.6134868264198303, |
|
"learning_rate": 1.555232558139535e-05, |
|
"loss": 0.0103, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 17.558139534883722, |
|
"grad_norm": 4.545815467834473, |
|
"learning_rate": 1.5406976744186045e-05, |
|
"loss": 0.0191, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 17.58139534883721, |
|
"grad_norm": 3.8483781814575195, |
|
"learning_rate": 1.5261627906976744e-05, |
|
"loss": 0.0311, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 17.6046511627907, |
|
"grad_norm": 2.32814359664917, |
|
"learning_rate": 1.5116279069767441e-05, |
|
"loss": 0.023, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 17.627906976744185, |
|
"grad_norm": 0.3794184625148773, |
|
"learning_rate": 1.4970930232558138e-05, |
|
"loss": 0.0054, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 17.651162790697676, |
|
"grad_norm": 1.3837960958480835, |
|
"learning_rate": 1.4825581395348839e-05, |
|
"loss": 0.0342, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 17.674418604651162, |
|
"grad_norm": 1.8951282501220703, |
|
"learning_rate": 1.4680232558139536e-05, |
|
"loss": 0.0618, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 17.674418604651162, |
|
"eval_accuracy": 0.898360655737705, |
|
"eval_f1": 0.8980054152598138, |
|
"eval_loss": 0.3718366026878357, |
|
"eval_precision": 0.8990226604393703, |
|
"eval_recall": 0.8974756612389075, |
|
"eval_runtime": 0.0766, |
|
"eval_samples_per_second": 3981.633, |
|
"eval_steps_per_second": 65.273, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 17.697674418604652, |
|
"grad_norm": 1.703765630722046, |
|
"learning_rate": 1.4534883720930233e-05, |
|
"loss": 0.0089, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 17.72093023255814, |
|
"grad_norm": 2.194350004196167, |
|
"learning_rate": 1.4389534883720932e-05, |
|
"loss": 0.0156, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 17.74418604651163, |
|
"grad_norm": 1.5929670333862305, |
|
"learning_rate": 1.424418604651163e-05, |
|
"loss": 0.0093, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 17.767441860465116, |
|
"grad_norm": 1.1527395248413086, |
|
"learning_rate": 1.4098837209302327e-05, |
|
"loss": 0.0087, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 17.790697674418606, |
|
"grad_norm": 0.9619439244270325, |
|
"learning_rate": 1.3953488372093024e-05, |
|
"loss": 0.0101, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 17.813953488372093, |
|
"grad_norm": 2.041372537612915, |
|
"learning_rate": 1.3808139534883721e-05, |
|
"loss": 0.0259, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 17.837209302325583, |
|
"grad_norm": 4.107051849365234, |
|
"learning_rate": 1.3662790697674418e-05, |
|
"loss": 0.0195, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 17.86046511627907, |
|
"grad_norm": 0.8670945763587952, |
|
"learning_rate": 1.3517441860465115e-05, |
|
"loss": 0.0076, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 17.88372093023256, |
|
"grad_norm": 2.5873591899871826, |
|
"learning_rate": 1.3372093023255814e-05, |
|
"loss": 0.0325, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 17.906976744186046, |
|
"grad_norm": 3.630141496658325, |
|
"learning_rate": 1.3226744186046513e-05, |
|
"loss": 0.0261, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 17.930232558139537, |
|
"grad_norm": 0.1693820059299469, |
|
"learning_rate": 1.308139534883721e-05, |
|
"loss": 0.0036, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 17.953488372093023, |
|
"grad_norm": 0.5527888536453247, |
|
"learning_rate": 1.2936046511627909e-05, |
|
"loss": 0.0051, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 17.97674418604651, |
|
"grad_norm": 0.8153883814811707, |
|
"learning_rate": 1.2790697674418606e-05, |
|
"loss": 0.0095, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"grad_norm": 0.2584695518016815, |
|
"learning_rate": 1.2645348837209303e-05, |
|
"loss": 0.0036, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 18.023255813953487, |
|
"grad_norm": 0.5313040018081665, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.0071, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 18.046511627906977, |
|
"grad_norm": 1.0279793739318848, |
|
"learning_rate": 1.2354651162790698e-05, |
|
"loss": 0.01, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 18.069767441860463, |
|
"grad_norm": 1.0528727769851685, |
|
"learning_rate": 1.2209302325581395e-05, |
|
"loss": 0.0168, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 18.093023255813954, |
|
"grad_norm": 1.2885291576385498, |
|
"learning_rate": 1.2063953488372094e-05, |
|
"loss": 0.0077, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 18.11627906976744, |
|
"grad_norm": 0.3943243622779846, |
|
"learning_rate": 1.1918604651162791e-05, |
|
"loss": 0.0026, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 18.13953488372093, |
|
"grad_norm": 1.1616085767745972, |
|
"learning_rate": 1.177325581395349e-05, |
|
"loss": 0.0061, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 18.13953488372093, |
|
"eval_accuracy": 0.898360655737705, |
|
"eval_f1": 0.8981460534962133, |
|
"eval_loss": 0.37966448068618774, |
|
"eval_precision": 0.8982758620689655, |
|
"eval_recall": 0.8980356681313001, |
|
"eval_runtime": 0.0743, |
|
"eval_samples_per_second": 4106.506, |
|
"eval_steps_per_second": 67.32, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 18.162790697674417, |
|
"grad_norm": 0.11722904443740845, |
|
"learning_rate": 1.1627906976744187e-05, |
|
"loss": 0.0023, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 18.186046511627907, |
|
"grad_norm": 1.2253535985946655, |
|
"learning_rate": 1.1482558139534884e-05, |
|
"loss": 0.0182, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 18.209302325581394, |
|
"grad_norm": 0.4131571054458618, |
|
"learning_rate": 1.1337209302325581e-05, |
|
"loss": 0.0038, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 18.232558139534884, |
|
"grad_norm": 0.5466241240501404, |
|
"learning_rate": 1.119186046511628e-05, |
|
"loss": 0.0033, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 18.25581395348837, |
|
"grad_norm": 0.8692965507507324, |
|
"learning_rate": 1.1046511627906977e-05, |
|
"loss": 0.0079, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 18.27906976744186, |
|
"grad_norm": 3.9168105125427246, |
|
"learning_rate": 1.0901162790697675e-05, |
|
"loss": 0.0861, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 18.302325581395348, |
|
"grad_norm": 1.2862012386322021, |
|
"learning_rate": 1.0755813953488372e-05, |
|
"loss": 0.0081, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 18.325581395348838, |
|
"grad_norm": 6.316368103027344, |
|
"learning_rate": 1.0610465116279069e-05, |
|
"loss": 0.044, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 18.348837209302324, |
|
"grad_norm": 7.892980098724365, |
|
"learning_rate": 1.0465116279069768e-05, |
|
"loss": 0.1037, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 18.372093023255815, |
|
"grad_norm": 7.4783172607421875, |
|
"learning_rate": 1.0319767441860467e-05, |
|
"loss": 0.0339, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 18.3953488372093, |
|
"grad_norm": 0.7634483575820923, |
|
"learning_rate": 1.0174418604651164e-05, |
|
"loss": 0.0039, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 18.41860465116279, |
|
"grad_norm": 3.145096778869629, |
|
"learning_rate": 1.0029069767441861e-05, |
|
"loss": 0.0739, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 18.441860465116278, |
|
"grad_norm": 6.179558277130127, |
|
"learning_rate": 9.883720930232558e-06, |
|
"loss": 0.0333, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 18.46511627906977, |
|
"grad_norm": 0.10819542407989502, |
|
"learning_rate": 9.738372093023257e-06, |
|
"loss": 0.0023, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 18.488372093023255, |
|
"grad_norm": 11.038155555725098, |
|
"learning_rate": 9.593023255813954e-06, |
|
"loss": 0.0816, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 18.511627906976745, |
|
"grad_norm": 4.89321756362915, |
|
"learning_rate": 9.447674418604651e-06, |
|
"loss": 0.0669, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 18.53488372093023, |
|
"grad_norm": 0.6758410930633545, |
|
"learning_rate": 9.302325581395349e-06, |
|
"loss": 0.0124, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 18.558139534883722, |
|
"grad_norm": 2.737079381942749, |
|
"learning_rate": 9.156976744186046e-06, |
|
"loss": 0.0134, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 18.58139534883721, |
|
"grad_norm": 5.82487154006958, |
|
"learning_rate": 9.011627906976745e-06, |
|
"loss": 0.0826, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 18.6046511627907, |
|
"grad_norm": 0.12569262087345123, |
|
"learning_rate": 8.866279069767444e-06, |
|
"loss": 0.0021, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 18.6046511627907, |
|
"eval_accuracy": 0.8950819672131147, |
|
"eval_f1": 0.8945822172297591, |
|
"eval_loss": 0.41386428475379944, |
|
"eval_precision": 0.8966414996094767, |
|
"eval_recall": 0.8937710002584647, |
|
"eval_runtime": 0.074, |
|
"eval_samples_per_second": 4121.655, |
|
"eval_steps_per_second": 67.568, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 18.627906976744185, |
|
"grad_norm": 7.690613269805908, |
|
"learning_rate": 8.72093023255814e-06, |
|
"loss": 0.1334, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 18.651162790697676, |
|
"grad_norm": 7.942806720733643, |
|
"learning_rate": 8.575581395348838e-06, |
|
"loss": 0.0349, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 18.674418604651162, |
|
"grad_norm": 1.4096626043319702, |
|
"learning_rate": 8.430232558139535e-06, |
|
"loss": 0.0086, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 18.697674418604652, |
|
"grad_norm": 2.184845209121704, |
|
"learning_rate": 8.284883720930232e-06, |
|
"loss": 0.021, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 18.72093023255814, |
|
"grad_norm": 5.123547554016113, |
|
"learning_rate": 8.139534883720931e-06, |
|
"loss": 0.0714, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 18.74418604651163, |
|
"grad_norm": 2.141044855117798, |
|
"learning_rate": 7.994186046511628e-06, |
|
"loss": 0.0227, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 18.767441860465116, |
|
"grad_norm": 0.7044129967689514, |
|
"learning_rate": 7.848837209302325e-06, |
|
"loss": 0.0064, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 18.790697674418606, |
|
"grad_norm": 0.9102310538291931, |
|
"learning_rate": 7.703488372093023e-06, |
|
"loss": 0.0056, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 18.813953488372093, |
|
"grad_norm": 6.990550518035889, |
|
"learning_rate": 7.558139534883721e-06, |
|
"loss": 0.0359, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 18.837209302325583, |
|
"grad_norm": 0.36822423338890076, |
|
"learning_rate": 7.4127906976744195e-06, |
|
"loss": 0.0036, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 18.86046511627907, |
|
"grad_norm": 1.0232497453689575, |
|
"learning_rate": 7.267441860465117e-06, |
|
"loss": 0.0077, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 18.88372093023256, |
|
"grad_norm": 2.3726754188537598, |
|
"learning_rate": 7.122093023255815e-06, |
|
"loss": 0.0625, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 18.906976744186046, |
|
"grad_norm": 1.6161715984344482, |
|
"learning_rate": 6.976744186046512e-06, |
|
"loss": 0.015, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 18.930232558139537, |
|
"grad_norm": 1.230698823928833, |
|
"learning_rate": 6.831395348837209e-06, |
|
"loss": 0.0595, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 18.953488372093023, |
|
"grad_norm": 4.402613162994385, |
|
"learning_rate": 6.686046511627907e-06, |
|
"loss": 0.0847, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 18.97674418604651, |
|
"grad_norm": 6.6045966148376465, |
|
"learning_rate": 6.540697674418605e-06, |
|
"loss": 0.036, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"grad_norm": 0.6431266069412231, |
|
"learning_rate": 6.395348837209303e-06, |
|
"loss": 0.0043, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 19.023255813953487, |
|
"grad_norm": 0.4433208703994751, |
|
"learning_rate": 6.25e-06, |
|
"loss": 0.0032, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 19.046511627906977, |
|
"grad_norm": 3.6785125732421875, |
|
"learning_rate": 6.1046511627906975e-06, |
|
"loss": 0.1244, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 19.069767441860463, |
|
"grad_norm": 2.5291504859924316, |
|
"learning_rate": 5.9593023255813955e-06, |
|
"loss": 0.0071, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 19.069767441860463, |
|
"eval_accuracy": 0.9049180327868852, |
|
"eval_f1": 0.9046556500555143, |
|
"eval_loss": 0.3841802775859833, |
|
"eval_precision": 0.9051627384960719, |
|
"eval_recall": 0.9043249763074007, |
|
"eval_runtime": 0.0744, |
|
"eval_samples_per_second": 4097.049, |
|
"eval_steps_per_second": 67.165, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 19.093023255813954, |
|
"grad_norm": 8.134071350097656, |
|
"learning_rate": 5.8139534883720935e-06, |
|
"loss": 0.0308, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 19.11627906976744, |
|
"grad_norm": 0.23603279888629913, |
|
"learning_rate": 5.668604651162791e-06, |
|
"loss": 0.0028, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 19.13953488372093, |
|
"grad_norm": 5.337473392486572, |
|
"learning_rate": 5.523255813953489e-06, |
|
"loss": 0.034, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 19.162790697674417, |
|
"grad_norm": 4.512674808502197, |
|
"learning_rate": 5.377906976744186e-06, |
|
"loss": 0.0775, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 19.186046511627907, |
|
"grad_norm": 1.7216310501098633, |
|
"learning_rate": 5.232558139534884e-06, |
|
"loss": 0.0056, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 19.209302325581394, |
|
"grad_norm": 1.9352085590362549, |
|
"learning_rate": 5.087209302325582e-06, |
|
"loss": 0.0423, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 19.232558139534884, |
|
"grad_norm": 10.692374229431152, |
|
"learning_rate": 4.941860465116279e-06, |
|
"loss": 0.0782, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 19.25581395348837, |
|
"grad_norm": 0.10650653392076492, |
|
"learning_rate": 4.796511627906977e-06, |
|
"loss": 0.0017, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 19.27906976744186, |
|
"grad_norm": 0.14889536798000336, |
|
"learning_rate": 4.651162790697674e-06, |
|
"loss": 0.0024, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 19.302325581395348, |
|
"grad_norm": 5.617801666259766, |
|
"learning_rate": 4.505813953488372e-06, |
|
"loss": 0.0121, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 19.325581395348838, |
|
"grad_norm": 2.859057903289795, |
|
"learning_rate": 4.36046511627907e-06, |
|
"loss": 0.034, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 19.348837209302324, |
|
"grad_norm": 0.27750277519226074, |
|
"learning_rate": 4.2151162790697675e-06, |
|
"loss": 0.0044, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 19.372093023255815, |
|
"grad_norm": 0.04883375018835068, |
|
"learning_rate": 4.0697674418604655e-06, |
|
"loss": 0.0018, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 19.3953488372093, |
|
"grad_norm": 1.9595685005187988, |
|
"learning_rate": 3.924418604651163e-06, |
|
"loss": 0.011, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 19.41860465116279, |
|
"grad_norm": 1.273353099822998, |
|
"learning_rate": 3.7790697674418603e-06, |
|
"loss": 0.0214, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 19.441860465116278, |
|
"grad_norm": 6.320394039154053, |
|
"learning_rate": 3.6337209302325583e-06, |
|
"loss": 0.081, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 19.46511627906977, |
|
"grad_norm": 2.93029522895813, |
|
"learning_rate": 3.488372093023256e-06, |
|
"loss": 0.0256, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 19.488372093023255, |
|
"grad_norm": 0.4745703339576721, |
|
"learning_rate": 3.3430232558139535e-06, |
|
"loss": 0.0033, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 19.511627906976745, |
|
"grad_norm": 4.16102933883667, |
|
"learning_rate": 3.1976744186046516e-06, |
|
"loss": 0.0482, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 19.53488372093023, |
|
"grad_norm": 0.2739373743534088, |
|
"learning_rate": 3.0523255813953487e-06, |
|
"loss": 0.0045, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 19.53488372093023, |
|
"eval_accuracy": 0.9049180327868852, |
|
"eval_f1": 0.9046556500555143, |
|
"eval_loss": 0.38903769850730896, |
|
"eval_precision": 0.9051627384960719, |
|
"eval_recall": 0.9043249763074007, |
|
"eval_runtime": 0.0754, |
|
"eval_samples_per_second": 4042.913, |
|
"eval_steps_per_second": 66.277, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 19.558139534883722, |
|
"grad_norm": 0.87278813123703, |
|
"learning_rate": 2.9069767441860468e-06, |
|
"loss": 0.0116, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 19.58139534883721, |
|
"grad_norm": 0.7536394596099854, |
|
"learning_rate": 2.7616279069767444e-06, |
|
"loss": 0.0036, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 19.6046511627907, |
|
"grad_norm": 4.64411735534668, |
|
"learning_rate": 2.616279069767442e-06, |
|
"loss": 0.0712, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 19.627906976744185, |
|
"grad_norm": 0.5402886271476746, |
|
"learning_rate": 2.4709302325581396e-06, |
|
"loss": 0.0079, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 19.651162790697676, |
|
"grad_norm": 0.591332733631134, |
|
"learning_rate": 2.325581395348837e-06, |
|
"loss": 0.0028, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 19.674418604651162, |
|
"grad_norm": 1.511197805404663, |
|
"learning_rate": 2.180232558139535e-06, |
|
"loss": 0.0216, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 19.697674418604652, |
|
"grad_norm": 2.847646951675415, |
|
"learning_rate": 2.0348837209302328e-06, |
|
"loss": 0.0246, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 19.72093023255814, |
|
"grad_norm": 0.33959656953811646, |
|
"learning_rate": 1.8895348837209302e-06, |
|
"loss": 0.0034, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 19.74418604651163, |
|
"grad_norm": 1.8157447576522827, |
|
"learning_rate": 1.744186046511628e-06, |
|
"loss": 0.0436, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 19.767441860465116, |
|
"grad_norm": 0.3644404113292694, |
|
"learning_rate": 1.5988372093023258e-06, |
|
"loss": 0.0038, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 19.790697674418606, |
|
"grad_norm": 2.5243899822235107, |
|
"learning_rate": 1.4534883720930234e-06, |
|
"loss": 0.0115, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 19.813953488372093, |
|
"grad_norm": 5.748671054840088, |
|
"learning_rate": 1.308139534883721e-06, |
|
"loss": 0.04, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 19.837209302325583, |
|
"grad_norm": 0.5189786553382874, |
|
"learning_rate": 1.1627906976744186e-06, |
|
"loss": 0.0049, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 19.86046511627907, |
|
"grad_norm": 2.05354380607605, |
|
"learning_rate": 1.0174418604651164e-06, |
|
"loss": 0.0591, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 19.88372093023256, |
|
"grad_norm": 2.982142925262451, |
|
"learning_rate": 8.72093023255814e-07, |
|
"loss": 0.0224, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 19.906976744186046, |
|
"grad_norm": 2.0666937828063965, |
|
"learning_rate": 7.267441860465117e-07, |
|
"loss": 0.0217, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 19.930232558139537, |
|
"grad_norm": 4.0683722496032715, |
|
"learning_rate": 5.813953488372093e-07, |
|
"loss": 0.108, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 19.953488372093023, |
|
"grad_norm": 10.890898704528809, |
|
"learning_rate": 4.36046511627907e-07, |
|
"loss": 0.069, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 19.97674418604651, |
|
"grad_norm": 0.7343159914016724, |
|
"learning_rate": 2.9069767441860464e-07, |
|
"loss": 0.0071, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"grad_norm": 1.6608028411865234, |
|
"learning_rate": 1.4534883720930232e-07, |
|
"loss": 0.0411, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_accuracy": 0.8950819672131147, |
|
"eval_f1": 0.8946732280065612, |
|
"eval_loss": 0.3955073356628418, |
|
"eval_precision": 0.895995670995671, |
|
"eval_recall": 0.894051003704661, |
|
"eval_runtime": 0.076, |
|
"eval_samples_per_second": 4014.381, |
|
"eval_steps_per_second": 65.81, |
|
"step": 860 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 860, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 200, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 68001814271952.0, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |