{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9086778736937755,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0009086778736937755, "grad_norm": 2.601068120514824, "learning_rate": 0.0, "loss": 0.7699, "step": 1},
    {"epoch": 0.001817355747387551, "grad_norm": 2.545451150614842, "learning_rate": 5.454545454545455e-07, "loss": 0.7768, "step": 2},
    {"epoch": 0.0027260336210813267, "grad_norm": 2.3373370819915653, "learning_rate": 1.090909090909091e-06, "loss": 0.7253, "step": 3},
    {"epoch": 0.003634711494775102, "grad_norm": 2.48299255257347, "learning_rate": 1.6363636363636363e-06, "loss": 0.7588, "step": 4},
    {"epoch": 0.004543389368468878, "grad_norm": 2.3266026791411267, "learning_rate": 2.181818181818182e-06, "loss": 0.7339, "step": 5},
    {"epoch": 0.005452067242162653, "grad_norm": 2.3111442162120097, "learning_rate": 2.7272727272727272e-06, "loss": 0.7585, "step": 6},
    {"epoch": 0.006360745115856429, "grad_norm": 1.839056170375473, "learning_rate": 3.2727272727272725e-06, "loss": 0.7157, "step": 7},
    {"epoch": 0.007269422989550204, "grad_norm": 1.8715018574536233, "learning_rate": 3.818181818181818e-06, "loss": 0.741, "step": 8},
    {"epoch": 0.00817810086324398, "grad_norm": 1.6703798180686897, "learning_rate": 4.363636363636364e-06, "loss": 0.7339, "step": 9},
    {"epoch": 0.009086778736937756, "grad_norm": 1.3022359540872304, "learning_rate": 4.90909090909091e-06, "loss": 0.7058, "step": 10},
    {"epoch": 0.009995456610631531, "grad_norm": 1.1538753981218908, "learning_rate": 5.4545454545454545e-06, "loss": 0.7158, "step": 11},
    {"epoch": 0.010904134484325307, "grad_norm": 1.1727389823350258, "learning_rate": 6e-06, "loss": 0.7236, "step": 12},
    {"epoch": 0.011812812358019082, "grad_norm": 1.7879982152520335, "learning_rate": 6.545454545454545e-06, "loss": 0.701, "step": 13},
    {"epoch": 0.012721490231712857, "grad_norm": 1.8765006568093932, "learning_rate": 7.090909090909091e-06, "loss": 0.6837, "step": 14},
    {"epoch": 0.013630168105406633, "grad_norm": 1.7017793683045053, "learning_rate": 7.636363636363636e-06, "loss": 0.6637, "step": 15},
    {"epoch": 0.014538845979100408, "grad_norm": 1.5271998134211906, "learning_rate": 8.181818181818181e-06, "loss": 0.6793, "step": 16},
    {"epoch": 0.015447523852794184, "grad_norm": 1.0560099784101875, "learning_rate": 8.727272727272728e-06, "loss": 0.6868, "step": 17},
    {"epoch": 0.01635620172648796, "grad_norm": 1.1802463918516672, "learning_rate": 9.272727272727273e-06, "loss": 0.6477, "step": 18},
    {"epoch": 0.017264879600181735, "grad_norm": 1.4964829988488886, "learning_rate": 9.81818181818182e-06, "loss": 0.6848, "step": 19},
    {"epoch": 0.01817355747387551, "grad_norm": 1.0423576592539534, "learning_rate": 1.0363636363636364e-05, "loss": 0.6439, "step": 20},
    {"epoch": 0.019082235347569285, "grad_norm": 0.8521617579985585, "learning_rate": 1.0909090909090909e-05, "loss": 0.6197, "step": 21},
    {"epoch": 0.019990913221263062, "grad_norm": 0.6455522796912817, "learning_rate": 1.1454545454545455e-05, "loss": 0.6165, "step": 22},
    {"epoch": 0.020899591094956836, "grad_norm": 0.6424805984129104, "learning_rate": 1.2e-05, "loss": 0.6447, "step": 23},
    {"epoch": 0.021808268968650613, "grad_norm": 0.6452431266273964, "learning_rate": 1.2545454545454545e-05, "loss": 0.6248, "step": 24},
    {"epoch": 0.02271694684234439, "grad_norm": 0.6091998561509787, "learning_rate": 1.309090909090909e-05, "loss": 0.6082, "step": 25},
    {"epoch": 0.023625624716038164, "grad_norm": 0.5899107852705936, "learning_rate": 1.3636363636363637e-05, "loss": 0.6381, "step": 26},
    {"epoch": 0.02453430258973194, "grad_norm": 0.5221735021853063, "learning_rate": 1.4181818181818181e-05, "loss": 0.6375, "step": 27},
    {"epoch": 0.025442980463425715, "grad_norm": 0.45451302628225215, "learning_rate": 1.4727272727272728e-05, "loss": 0.5926, "step": 28},
    {"epoch": 0.026351658337119492, "grad_norm": 0.48697186829612116, "learning_rate": 1.5272727272727273e-05, "loss": 0.6094, "step": 29},
    {"epoch": 0.027260336210813266, "grad_norm": 0.5191870380408595, "learning_rate": 1.5818181818181818e-05, "loss": 0.6339, "step": 30},
    {"epoch": 0.028169014084507043, "grad_norm": 0.4558984969477796, "learning_rate": 1.6363636363636363e-05, "loss": 0.6045, "step": 31},
    {"epoch": 0.029077691958200817, "grad_norm": 0.47078492141998163, "learning_rate": 1.6909090909090907e-05, "loss": 0.6168, "step": 32},
    {"epoch": 0.029986369831894594, "grad_norm": 0.40599045996309796, "learning_rate": 1.7454545454545456e-05, "loss": 0.5991, "step": 33},
    {"epoch": 0.030895047705588367, "grad_norm": 0.394694958480182, "learning_rate": 1.8e-05, "loss": 0.5896, "step": 34},
    {"epoch": 0.03180372557928214, "grad_norm": 0.3940540280992479, "learning_rate": 1.8545454545454545e-05, "loss": 0.5873, "step": 35},
    {"epoch": 0.03271240345297592, "grad_norm": 0.443078986005425, "learning_rate": 1.909090909090909e-05, "loss": 0.5893, "step": 36},
    {"epoch": 0.033621081326669695, "grad_norm": 0.40061462050322844, "learning_rate": 1.963636363636364e-05, "loss": 0.6071, "step": 37},
    {"epoch": 0.03452975920036347, "grad_norm": 0.3651344870337699, "learning_rate": 2.0181818181818183e-05, "loss": 0.5952, "step": 38},
    {"epoch": 0.03543843707405725, "grad_norm": 0.3574739663311614, "learning_rate": 2.0727272727272728e-05, "loss": 0.5849, "step": 39},
    {"epoch": 0.03634711494775102, "grad_norm": 0.39438420805485946, "learning_rate": 2.1272727272727273e-05, "loss": 0.5874, "step": 40},
    {"epoch": 0.0372557928214448, "grad_norm": 0.38235263642566175, "learning_rate": 2.1818181818181818e-05, "loss": 0.589, "step": 41},
    {"epoch": 0.03816447069513857, "grad_norm": 0.3166953970319303, "learning_rate": 2.2363636363636366e-05, "loss": 0.5442, "step": 42},
    {"epoch": 0.03907314856883235, "grad_norm": 0.3831322680208187, "learning_rate": 2.290909090909091e-05, "loss": 0.5866, "step": 43},
    {"epoch": 0.039981826442526125, "grad_norm": 0.40298176888334175, "learning_rate": 2.3454545454545456e-05, "loss": 0.5882, "step": 44},
    {"epoch": 0.0408905043162199, "grad_norm": 0.3299018047565981, "learning_rate": 2.4e-05, "loss": 0.5837, "step": 45},
    {"epoch": 0.04179918218991367, "grad_norm": 0.35755407226914043, "learning_rate": 2.454545454545455e-05, "loss": 0.6004, "step": 46},
    {"epoch": 0.04270786006360745, "grad_norm": 0.40629650524300004, "learning_rate": 2.509090909090909e-05, "loss": 0.585, "step": 47},
    {"epoch": 0.04361653793730123, "grad_norm": 0.35063710725122565, "learning_rate": 2.5636363636363635e-05, "loss": 0.556, "step": 48},
    {"epoch": 0.044525215810995, "grad_norm": 0.34503154628666083, "learning_rate": 2.618181818181818e-05, "loss": 0.5638, "step": 49},
    {"epoch": 0.04543389368468878, "grad_norm": 0.41060546364625117, "learning_rate": 2.6727272727272728e-05, "loss": 0.5799, "step": 50},
    {"epoch": 0.046342571558382555, "grad_norm": 0.4194490541584686, "learning_rate": 2.7272727272727273e-05, "loss": 0.5741, "step": 51},
    {"epoch": 0.04725124943207633, "grad_norm": 0.3500886645834267, "learning_rate": 2.7818181818181818e-05, "loss": 0.5821, "step": 52},
    {"epoch": 0.0481599273057701, "grad_norm": 0.3835932551976908, "learning_rate": 2.8363636363636363e-05, "loss": 0.5959, "step": 53},
    {"epoch": 0.04906860517946388, "grad_norm": 0.3416728291114892, "learning_rate": 2.890909090909091e-05, "loss": 0.6064, "step": 54},
    {"epoch": 0.049977283053157656, "grad_norm": 0.34404874346555325, "learning_rate": 2.9454545454545456e-05, "loss": 0.5755, "step": 55},
    {"epoch": 0.05088596092685143, "grad_norm": 0.3710765456432596, "learning_rate": 3e-05, "loss": 0.5679, "step": 56},
    {"epoch": 0.051794638800545204, "grad_norm": 0.29938282115592735, "learning_rate": 2.9999932215858376e-05, "loss": 0.5694, "step": 57},
    {"epoch": 0.052703316674238984, "grad_norm": 0.35057253497259383, "learning_rate": 2.9999728864046126e-05, "loss": 0.5538, "step": 58},
    {"epoch": 0.05361199454793276, "grad_norm": 0.30805671744258767, "learning_rate": 2.9999389946401123e-05, "loss": 0.5781, "step": 59},
    {"epoch": 0.05452067242162653, "grad_norm": 0.3482706906480643, "learning_rate": 2.9998915465986464e-05, "loss": 0.5794, "step": 60},
    {"epoch": 0.05542935029532031, "grad_norm": 0.337361409759639, "learning_rate": 2.999830542709045e-05, "loss": 0.5689, "step": 61},
    {"epoch": 0.056338028169014086, "grad_norm": 0.3082843990483217, "learning_rate": 2.9997559835226545e-05, "loss": 0.5574, "step": 62},
    {"epoch": 0.05724670604270786, "grad_norm": 0.3440993015680002, "learning_rate": 2.9996678697133317e-05, "loss": 0.5355, "step": 63},
    {"epoch": 0.05815538391640163, "grad_norm": 0.3505726852328007, "learning_rate": 2.9995662020774395e-05, "loss": 0.5758, "step": 64},
    {"epoch": 0.059064061790095414, "grad_norm": 0.3578683792155986, "learning_rate": 2.999450981533838e-05, "loss": 0.5576, "step": 65},
    {"epoch": 0.05997273966378919, "grad_norm": 0.305534429017613, "learning_rate": 2.999322209123878e-05, "loss": 0.5611, "step": 66},
    {"epoch": 0.06088141753748296, "grad_norm": 0.3461569434390687, "learning_rate": 2.999179886011389e-05, "loss": 0.5681, "step": 67},
    {"epoch": 0.061790095411176735, "grad_norm": 0.36951699442069197, "learning_rate": 2.999024013482672e-05, "loss": 0.5546, "step": 68},
    {"epoch": 0.06269877328487052, "grad_norm": 0.30035631327373347, "learning_rate": 2.9988545929464837e-05, "loss": 0.5641, "step": 69},
    {"epoch": 0.06360745115856428, "grad_norm": 0.3147469851517606, "learning_rate": 2.9986716259340288e-05, "loss": 0.564, "step": 70},
    {"epoch": 0.06451612903225806, "grad_norm": 0.3287019882975828, "learning_rate": 2.9984751140989417e-05, "loss": 0.5585, "step": 71},
    {"epoch": 0.06542480690595184, "grad_norm": 0.3262923611974805, "learning_rate": 2.9982650592172738e-05, "loss": 0.5788, "step": 72},
    {"epoch": 0.06633348477964561, "grad_norm": 0.30113514428264637, "learning_rate": 2.998041463187477e-05, "loss": 0.5397, "step": 73},
    {"epoch": 0.06724216265333939, "grad_norm": 0.34137667654270437, "learning_rate": 2.9978043280303867e-05, "loss": 0.5695, "step": 74},
    {"epoch": 0.06815084052703317, "grad_norm": 0.30136381991899835, "learning_rate": 2.9975536558892034e-05, "loss": 0.5574, "step": 75},
    {"epoch": 0.06905951840072694, "grad_norm": 0.3029251079889505, "learning_rate": 2.9972894490294738e-05, "loss": 0.5519, "step": 76},
    {"epoch": 0.06996819627442072, "grad_norm": 0.34180753067947, "learning_rate": 2.9970117098390682e-05, "loss": 0.5661, "step": 77},
    {"epoch": 0.0708768741481145, "grad_norm": 0.3679070731789738, "learning_rate": 2.9967204408281618e-05, "loss": 0.5775, "step": 78},
    {"epoch": 0.07178555202180827, "grad_norm": 0.29755135710898023, "learning_rate": 2.9964156446292112e-05, "loss": 0.5512, "step": 79},
    {"epoch": 0.07269422989550205, "grad_norm": 0.3237156945925559, "learning_rate": 2.9960973239969295e-05, "loss": 0.5807, "step": 80},
    {"epoch": 0.07360290776919581, "grad_norm": 0.28459486097868475, "learning_rate": 2.9957654818082615e-05, "loss": 0.5397, "step": 81},
    {"epoch": 0.0745115856428896, "grad_norm": 0.3299635371208699, "learning_rate": 2.9954201210623594e-05, "loss": 0.5742, "step": 82},
    {"epoch": 0.07542026351658337, "grad_norm": 0.29790933007572773, "learning_rate": 2.995061244880554e-05, "loss": 0.5498, "step": 83},
    {"epoch": 0.07632894139027714, "grad_norm": 0.310578411796522, "learning_rate": 2.994688856506327e-05, "loss": 0.5772, "step": 84},
    {"epoch": 0.07723761926397092, "grad_norm": 0.2830093354841121, "learning_rate": 2.9943029593052822e-05, "loss": 0.5612, "step": 85},
    {"epoch": 0.0781462971376647, "grad_norm": 0.31044824437431984, "learning_rate": 2.9939035567651146e-05, "loss": 0.5494, "step": 86},
    {"epoch": 0.07905497501135847, "grad_norm": 0.26786587945077706, "learning_rate": 2.9934906524955777e-05, "loss": 0.5115, "step": 87},
    {"epoch": 0.07996365288505225, "grad_norm": 0.3128021372443758, "learning_rate": 2.9930642502284537e-05, "loss": 0.5531, "step": 88},
    {"epoch": 0.08087233075874603, "grad_norm": 0.2775957207658622, "learning_rate": 2.9926243538175172e-05, "loss": 0.5387, "step": 89},
    {"epoch": 0.0817810086324398, "grad_norm": 0.27919195499528654, "learning_rate": 2.992170967238502e-05, "loss": 0.5528, "step": 90},
    {"epoch": 0.08268968650613358, "grad_norm": 0.3078667562238638, "learning_rate": 2.9917040945890638e-05, "loss": 0.545, "step": 91},
    {"epoch": 0.08359836437982734, "grad_norm": 0.32246942789494776, "learning_rate": 2.9912237400887442e-05, "loss": 0.5741, "step": 92},
    {"epoch": 0.08450704225352113, "grad_norm": 0.31869933415834445, "learning_rate": 2.9907299080789326e-05, "loss": 0.5655, "step": 93},
    {"epoch": 0.0854157201272149, "grad_norm": 0.26749345526391427, "learning_rate": 2.9902226030228252e-05, "loss": 0.5255, "step": 94},
    {"epoch": 0.08632439800090867, "grad_norm": 0.32883178074329494, "learning_rate": 2.9897018295053883e-05, "loss": 0.5249, "step": 95},
    {"epoch": 0.08723307587460245, "grad_norm": 0.3232345235969363, "learning_rate": 2.9891675922333125e-05, "loss": 0.5692, "step": 96},
    {"epoch": 0.08814175374829623, "grad_norm": 0.3548603927764629, "learning_rate": 2.9886198960349733e-05, "loss": 0.5688, "step": 97},
    {"epoch": 0.08905043162199, "grad_norm": 0.4796806813236084, "learning_rate": 2.9880587458603862e-05, "loss": 0.5615, "step": 98},
    {"epoch": 0.08995910949568378, "grad_norm": 0.3048044136414707, "learning_rate": 2.9874841467811624e-05, "loss": 0.5455, "step": 99},
    {"epoch": 0.09086778736937756, "grad_norm": 0.3345696458929397, "learning_rate": 2.9868961039904628e-05, "loss": 0.5521, "step": 100},
    {"epoch": 0.09177646524307133, "grad_norm": 0.29613055665568655, "learning_rate": 2.9862946228029507e-05, "loss": 0.5381, "step": 101},
    {"epoch": 0.09268514311676511, "grad_norm": 0.3088460179022418, "learning_rate": 2.9856797086547435e-05, "loss": 0.5585, "step": 102},
    {"epoch": 0.09359382099045888, "grad_norm": 0.34901311274948843, "learning_rate": 2.9850513671033664e-05, "loss": 0.573, "step": 103},
    {"epoch": 0.09450249886415266, "grad_norm": 0.352416635014394, "learning_rate": 2.984409603827697e-05, "loss": 0.5635, "step": 104},
    {"epoch": 0.09541117673784644, "grad_norm": 0.3207125359081875, "learning_rate": 2.983754424627919e-05, "loss": 0.5215, "step": 105},
    {"epoch": 0.0963198546115402, "grad_norm": 0.31749439598402857, "learning_rate": 2.9830858354254672e-05, "loss": 0.541, "step": 106},
    {"epoch": 0.09722853248523398, "grad_norm": 0.31449476495281475, "learning_rate": 2.9824038422629737e-05, "loss": 0.5506, "step": 107},
    {"epoch": 0.09813721035892777, "grad_norm": 0.44629956406034843, "learning_rate": 2.9817084513042153e-05, "loss": 0.549, "step": 108},
    {"epoch": 0.09904588823262153, "grad_norm": 0.3066824935289338, "learning_rate": 2.9809996688340552e-05, "loss": 0.5445, "step": 109},
    {"epoch": 0.09995456610631531, "grad_norm": 0.34715367523179214, "learning_rate": 2.9802775012583884e-05, "loss": 0.5596, "step": 110},
    {"epoch": 0.10086324398000909, "grad_norm": 0.30888532714559946, "learning_rate": 2.9795419551040836e-05, "loss": 0.556, "step": 111},
    {"epoch": 0.10177192185370286, "grad_norm": 0.3527630189050702, "learning_rate": 2.978793037018922e-05, "loss": 0.5548, "step": 112},
    {"epoch": 0.10268059972739664, "grad_norm": 0.3161408926482892, "learning_rate": 2.9780307537715396e-05, "loss": 0.5626, "step": 113},
    {"epoch": 0.10358927760109041, "grad_norm": 0.28890312139326835, "learning_rate": 2.9772551122513652e-05, "loss": 0.5507, "step": 114},
    {"epoch": 0.10449795547478419, "grad_norm": 0.3229834707417106, "learning_rate": 2.9764661194685583e-05, "loss": 0.5564, "step": 115},
    {"epoch": 0.10540663334847797, "grad_norm": 0.31937480406323543, "learning_rate": 2.9756637825539453e-05, "loss": 0.5416, "step": 116},
    {"epoch": 0.10631531122217174, "grad_norm": 0.28663393401926945, "learning_rate": 2.9748481087589552e-05, "loss": 0.5583, "step": 117},
    {"epoch": 0.10722398909586552, "grad_norm": 0.2974995696933711, "learning_rate": 2.974019105455554e-05, "loss": 0.5499, "step": 118},
    {"epoch": 0.1081326669695593, "grad_norm": 0.29684178337584144, "learning_rate": 2.97317678013618e-05, "loss": 0.5609, "step": 119},
    {"epoch": 0.10904134484325306, "grad_norm": 0.3262506812045903, "learning_rate": 2.972321140413672e-05, "loss": 0.5455, "step": 120},
    {"epoch": 0.10995002271694684, "grad_norm": 0.3092601543840332, "learning_rate": 2.971452194021204e-05, "loss": 0.551, "step": 121},
    {"epoch": 0.11085870059064062, "grad_norm": 0.4005513894771114, "learning_rate": 2.970569948812214e-05, "loss": 0.5591, "step": 122},
    {"epoch": 0.11176737846433439, "grad_norm": 0.327190640093946, "learning_rate": 2.969674412760334e-05, "loss": 0.5428, "step": 123},
    {"epoch": 0.11267605633802817, "grad_norm": 0.3155091476237132, "learning_rate": 2.968765593959315e-05, "loss": 0.5497, "step": 124},
    {"epoch": 0.11358473421172194, "grad_norm": 0.3624314389132723, "learning_rate": 2.9678435006229585e-05, "loss": 0.5599, "step": 125},
    {"epoch": 0.11449341208541572, "grad_norm": 0.2696492296407551, "learning_rate": 2.9669081410850378e-05, "loss": 0.5466, "step": 126},
    {"epoch": 0.1154020899591095, "grad_norm": 0.35239441892045026, "learning_rate": 2.9659595237992256e-05, "loss": 0.5677, "step": 127},
    {"epoch": 0.11631076783280327, "grad_norm": 0.2770977804416505, "learning_rate": 2.9649976573390168e-05, "loss": 0.5551, "step": 128},
    {"epoch": 0.11721944570649705, "grad_norm": 0.317546939102512, "learning_rate": 2.9640225503976495e-05, "loss": 0.5493, "step": 129},
    {"epoch": 0.11812812358019083, "grad_norm": 0.29901361868733123, "learning_rate": 2.9630342117880293e-05, "loss": 0.5512, "step": 130},
    {"epoch": 0.1190368014538846, "grad_norm": 0.34796394305794476, "learning_rate": 2.9620326504426476e-05, "loss": 0.5449, "step": 131},
    {"epoch": 0.11994547932757837, "grad_norm": 0.3189637752329797, "learning_rate": 2.9610178754135005e-05, "loss": 0.5352, "step": 132},
    {"epoch": 0.12085415720127216, "grad_norm": 0.3621323294601048, "learning_rate": 2.9599898958720088e-05, "loss": 0.5758, "step": 133},
    {"epoch": 0.12176283507496592, "grad_norm": 0.3017668854168288, "learning_rate": 2.958948721108934e-05, "loss": 0.5524, "step": 134},
    {"epoch": 0.1226715129486597, "grad_norm": 0.3019819675954715, "learning_rate": 2.957894360534295e-05, "loss": 0.5408, "step": 135},
    {"epoch": 0.12358019082235347, "grad_norm": 0.29500722376897576, "learning_rate": 2.9568268236772816e-05, "loss": 0.5461, "step": 136},
    {"epoch": 0.12448886869604725, "grad_norm": 0.4193960921261131, "learning_rate": 2.955746120186169e-05, "loss": 0.5355, "step": 137},
    {"epoch": 0.12539754656974103, "grad_norm": 0.2750383897904465, "learning_rate": 2.9546522598282325e-05, "loss": 0.5596, "step": 138},
    {"epoch": 0.1263062244434348, "grad_norm": 0.34998374547584626, "learning_rate": 2.953545252489657e-05, "loss": 0.5489, "step": 139},
    {"epoch": 0.12721490231712856, "grad_norm": 0.2701830779505992, "learning_rate": 2.9524251081754475e-05, "loss": 0.5412, "step": 140},
    {"epoch": 0.12812358019082234, "grad_norm": 0.29400344926785826, "learning_rate": 2.9512918370093407e-05, "loss": 0.5541, "step": 141},
    {"epoch": 0.12903225806451613, "grad_norm": 0.2684006986647009, "learning_rate": 2.9501454492337107e-05, "loss": 0.5249, "step": 142},
    {"epoch": 0.1299409359382099, "grad_norm": 0.34974547659671557, "learning_rate": 2.9489859552094806e-05, "loss": 0.5722, "step": 143},
    {"epoch": 0.1308496138119037, "grad_norm": 0.2911364725934339, "learning_rate": 2.947813365416023e-05, "loss": 0.534, "step": 144},
    {"epoch": 0.13175829168559747, "grad_norm": 0.2685521574942707, "learning_rate": 2.9466276904510713e-05, "loss": 0.5282, "step": 145},
    {"epoch": 0.13266696955929122, "grad_norm": 0.27716261776006357, "learning_rate": 2.9454289410306202e-05, "loss": 0.5559, "step": 146},
    {"epoch": 0.133575647432985, "grad_norm": 0.32403029678097417, "learning_rate": 2.9442171279888286e-05, "loss": 0.5562, "step": 147},
    {"epoch": 0.13448432530667878, "grad_norm": 0.29914381494298525, "learning_rate": 2.942992262277926e-05, "loss": 0.5498, "step": 148},
    {"epoch": 0.13539300318037256, "grad_norm": 0.2999453587781109, "learning_rate": 2.9417543549681067e-05, "loss": 0.5512, "step": 149},
    {"epoch": 0.13630168105406634, "grad_norm": 0.33912555060096045, "learning_rate": 2.9405034172474363e-05, "loss": 0.5513, "step": 150},
    {"epoch": 0.1372103589277601, "grad_norm": 0.2786003018351176, "learning_rate": 2.939239460421746e-05, "loss": 0.5357, "step": 151},
    {"epoch": 0.13811903680145388, "grad_norm": 0.31469991721983587, "learning_rate": 2.937962495914534e-05, "loss": 0.5471, "step": 152},
    {"epoch": 0.13902771467514766, "grad_norm": 0.3238481784334065, "learning_rate": 2.9366725352668584e-05, "loss": 0.5345, "step": 153},
    {"epoch": 0.13993639254884144, "grad_norm": 0.28913738590561583, "learning_rate": 2.9353695901372363e-05, "loss": 0.5413, "step": 154},
    {"epoch": 0.14084507042253522, "grad_norm": 0.2985726097232229, "learning_rate": 2.9340536723015367e-05, "loss": 0.5422, "step": 155},
    {"epoch": 0.141753748296229, "grad_norm": 0.3520712671470228, "learning_rate": 2.9327247936528742e-05, "loss": 0.5137, "step": 156},
    {"epoch": 0.14266242616992275, "grad_norm": 0.27128214863020367, "learning_rate": 2.931382966201502e-05, "loss": 0.5434, "step": 157},
    {"epoch": 0.14357110404361653, "grad_norm": 0.29607019876927254, "learning_rate": 2.930028202074703e-05, "loss": 0.5333, "step": 158},
    {"epoch": 0.1444797819173103, "grad_norm": 0.3443171478381605, "learning_rate": 2.9286605135166806e-05, "loss": 0.5429, "step": 159},
    {"epoch": 0.1453884597910041, "grad_norm": 0.2724745024955907, "learning_rate": 2.927279912888447e-05, "loss": 0.5436, "step": 160},
    {"epoch": 0.14629713766469787, "grad_norm": 0.32926171965555123, "learning_rate": 2.9258864126677132e-05, "loss": 0.5457, "step": 161},
    {"epoch": 0.14720581553839163, "grad_norm": 0.26119164354842045, "learning_rate": 2.9244800254487744e-05, "loss": 0.5366, "step": 162},
    {"epoch": 0.1481144934120854, "grad_norm": 0.3048750099033153, "learning_rate": 2.9230607639423973e-05, "loss": 0.539, "step": 163},
    {"epoch": 0.1490231712857792, "grad_norm": 0.3235214935466523, "learning_rate": 2.9216286409757052e-05, "loss": 0.5447, "step": 164},
    {"epoch": 0.14993184915947297, "grad_norm": 0.37561127087178964, "learning_rate": 2.920183669492061e-05, "loss": 0.5611, "step": 165},
    {"epoch": 0.15084052703316675, "grad_norm": 0.300540850326533, "learning_rate": 2.9187258625509518e-05, "loss": 0.5367, "step": 166},
    {"epoch": 0.15174920490686053, "grad_norm": 0.3576135446433189, "learning_rate": 2.9172552333278708e-05, "loss": 0.5337, "step": 167},
    {"epoch": 0.15265788278055428, "grad_norm": 0.31274340375293364, "learning_rate": 2.9157717951141953e-05, "loss": 0.5381, "step": 168},
    {"epoch": 0.15356656065424806, "grad_norm": 0.3352810642235237, "learning_rate": 2.914275561317071e-05, "loss": 0.548, "step": 169},
    {"epoch": 0.15447523852794184, "grad_norm": 0.30963404928391625, "learning_rate": 2.9127665454592872e-05, "loss": 0.5345, "step": 170},
    {"epoch": 0.15538391640163562, "grad_norm": 0.3071782564022272, "learning_rate": 2.9112447611791563e-05, "loss": 0.5298, "step": 171},
    {"epoch": 0.1562925942753294, "grad_norm": 0.29846884519248473, "learning_rate": 2.9097102222303914e-05, "loss": 0.5373, "step": 172},
    {"epoch": 0.15720127214902316, "grad_norm": 0.32363500369528425, "learning_rate": 2.9081629424819792e-05, "loss": 0.5419, "step": 173},
    {"epoch": 0.15810995002271694, "grad_norm": 0.2979588134888228, "learning_rate": 2.906602935918057e-05, "loss": 0.5481, "step": 174},
    {"epoch": 0.15901862789641072, "grad_norm": 0.30814351362966413, "learning_rate": 2.9050302166377858e-05, "loss": 0.5226, "step": 175},
    {"epoch": 0.1599273057701045, "grad_norm": 0.2842362351308814, "learning_rate": 2.9034447988552227e-05, "loss": 0.5466, "step": 176},
    {"epoch": 0.16083598364379828, "grad_norm": 0.30495693391974155, "learning_rate": 2.9018466968991913e-05, "loss": 0.5293, "step": 177},
    {"epoch": 0.16174466151749206, "grad_norm": 0.2972660949512962, "learning_rate": 2.9002359252131547e-05, "loss": 0.5373, "step": 178},
    {"epoch": 0.1626533393911858, "grad_norm": 0.25956110689877127, "learning_rate": 2.8986124983550836e-05, "loss": 0.5409, "step": 179},
    {"epoch": 0.1635620172648796, "grad_norm": 0.28704003352350194, "learning_rate": 2.896976430997323e-05, "loss": 0.5367, "step": 180},
    {"epoch": 0.16447069513857338, "grad_norm": 0.27494590106809935, "learning_rate": 2.8953277379264633e-05, "loss": 0.5245, "step": 181},
    {"epoch": 0.16537937301226716, "grad_norm": 0.29025723129556125, "learning_rate": 2.8936664340432033e-05, "loss": 0.5634, "step": 182},
    {"epoch": 0.16628805088596094, "grad_norm": 0.2640721729777581, "learning_rate": 2.891992534362218e-05, "loss": 0.5575, "step": 183},
    {"epoch": 0.1671967287596547, "grad_norm": 0.28770467313834736, "learning_rate": 2.8903060540120203e-05, "loss": 0.5526, "step": 184},
    {"epoch": 0.16810540663334847, "grad_norm": 0.267891776899334, "learning_rate": 2.8886070082348268e-05, "loss": 0.5475, "step": 185},
    {"epoch": 0.16901408450704225, "grad_norm": 0.3035468603633203, "learning_rate": 2.8868954123864194e-05, "loss": 0.5288, "step": 186},
    {"epoch": 0.16992276238073603, "grad_norm": 0.2597479657341249, "learning_rate": 2.885171281936005e-05, "loss": 0.5221, "step": 187},
    {"epoch": 0.1708314402544298, "grad_norm": 0.30549990341073835, "learning_rate": 2.883434632466077e-05, "loss": 0.5241, "step": 188},
    {"epoch": 0.1717401181281236, "grad_norm": 0.3097670337344422, "learning_rate": 2.8816854796722754e-05, "loss": 0.5567, "step": 189},
    {"epoch": 0.17264879600181735, "grad_norm": 0.24887750502737235, "learning_rate": 2.879923839363242e-05, "loss": 0.554, "step": 190},
    {"epoch": 0.17355747387551113, "grad_norm": 0.30369106518383837, "learning_rate": 2.878149727460481e-05, "loss": 0.5322, "step": 191},
    {"epoch": 0.1744661517492049, "grad_norm": 0.2734596089931484, "learning_rate": 2.8763631599982126e-05, "loss": 0.5345, "step": 192},
    {"epoch": 0.1753748296228987, "grad_norm": 0.29225021074746216, "learning_rate": 2.874564153123228e-05, "loss": 0.5419, "step": 193},
    {"epoch": 0.17628350749659247, "grad_norm": 0.3002278845544173, "learning_rate": 2.8727527230947473e-05, "loss": 0.5326, "step": 194},
    {"epoch": 0.17719218537028622, "grad_norm": 0.2794201841833264, "learning_rate": 2.870928886284267e-05, "loss": 0.5251, "step": 195},
    {"epoch": 0.17810086324398, "grad_norm": 0.2811482022282061, "learning_rate": 2.8690926591754142e-05, "loss": 0.5485, "step": 196},
    {"epoch": 0.17900954111767378, "grad_norm": 0.2911471546431618, "learning_rate": 2.8672440583638013e-05, "loss": 0.5407, "step": 197},
    {"epoch": 0.17991821899136756, "grad_norm": 0.2730896343056817, "learning_rate": 2.86538310055687e-05, "loss": 0.5047, "step": 198},
    {"epoch": 0.18082689686506134, "grad_norm": 0.3138295451241485, "learning_rate": 2.863509802573744e-05, "loss": 0.558, "step": 199},
    {"epoch": 0.18173557473875512, "grad_norm": 0.30518868956972217, "learning_rate": 2.8616241813450755e-05, "loss": 0.533, "step": 200},
    {"epoch": 0.18264425261244888, "grad_norm": 0.2746559666747863, "learning_rate": 2.8597262539128947e-05, "loss": 0.5207, "step": 201},
    {"epoch": 0.18355293048614266, "grad_norm": 0.2982492469468351, "learning_rate": 2.857816037430451e-05, "loss": 0.5309, "step": 202},
    {"epoch": 0.18446160835983644, "grad_norm": 0.2819015087920199, "learning_rate": 2.8558935491620634e-05, "loss": 0.5414, "step": 203},
    {"epoch": 0.18537028623353022, "grad_norm": 0.2831047957568758, "learning_rate": 2.8539588064829605e-05, "loss": 0.5539, "step": 204},
    {"epoch": 0.186278964107224, "grad_norm": 0.3451150482089612, "learning_rate": 2.852011826879125e-05, "loss": 0.5331, "step": 205},
    {"epoch": 0.18718764198091775, "grad_norm": 0.2525444743911961, "learning_rate": 2.8500526279471362e-05, "loss": 0.5272, "step": 206},
    {"epoch": 0.18809631985461153, "grad_norm": 0.29713374816520677, "learning_rate": 2.8480812273940097e-05, "loss": 0.5526, "step": 207},
    {"epoch": 0.1890049977283053, "grad_norm": 0.2685082552759803, "learning_rate": 2.8460976430370375e-05, "loss": 0.5114, "step": 208},
    {"epoch": 0.1899136756019991, "grad_norm": 0.2784798341003174, "learning_rate": 2.8441018928036287e-05, "loss": 0.5204, "step": 209},
    {"epoch": 0.19082235347569287, "grad_norm": 0.30091412799694445, "learning_rate": 2.8420939947311454e-05, "loss": 0.5371, "step": 210},
    {"epoch": 0.19173103134938665, "grad_norm": 0.28284608142963674, "learning_rate": 2.84007396696674e-05, "loss": 0.5274, "step": 211},
    {"epoch": 0.1926397092230804, "grad_norm": 0.3282616006754972, "learning_rate": 2.8380418277671928e-05, "loss": 0.5319, "step": 212},
    {"epoch": 0.1935483870967742, "grad_norm": 0.26486949289774536, "learning_rate": 2.8359975954987453e-05, "loss": 0.5319, "step": 213},
    {"epoch": 0.19445706497046797, "grad_norm": 0.3161233971442626, "learning_rate": 2.8339412886369345e-05, "loss": 0.5362, "step": 214},
    {"epoch": 0.19536574284416175, "grad_norm": 0.299905638785644, "learning_rate": 2.8318729257664265e-05, "loss": 0.5632, "step": 215},
    {"epoch": 0.19627442071785553, "grad_norm": 0.27636515077908513, "learning_rate": 2.8297925255808484e-05, "loss": 0.5359, "step": 216},
    {"epoch": 0.19718309859154928, "grad_norm": 0.33338932554796485, "learning_rate": 2.8277001068826187e-05, "loss": 0.5421, "step": 217},
    {"epoch": 0.19809177646524306, "grad_norm": 0.27956404676663693, "learning_rate": 2.8255956885827786e-05, "loss": 0.5309, "step": 218},
    {"epoch": 0.19900045433893684, "grad_norm": 0.28929701016126264, "learning_rate": 2.8234792897008194e-05, "loss": 0.5482, "step": 219},
    {"epoch": 0.19990913221263062, "grad_norm": 0.2937742621819467, "learning_rate": 2.821350929364512e-05, "loss": 0.537, "step": 220},
    {"epoch": 0.2008178100863244, "grad_norm": 0.27127495025152987, "learning_rate": 2.8192106268097336e-05, "loss": 0.5271, "step": 221},
    {"epoch": 0.20172648796001819, "grad_norm": 0.26418445759257114, "learning_rate": 2.817058401380294e-05, "loss": 0.5251, "step": 222},
    {"epoch": 0.20263516583371194, "grad_norm": 0.2858673177684851, "learning_rate": 2.81489427252776e-05, "loss": 0.5339, "step": 223},
    {"epoch": 0.20354384370740572, "grad_norm": 0.25280337401887915, "learning_rate": 2.8127182598112805e-05, "loss": 0.5293, "step": 224},
    {"epoch": 0.2044525215810995, "grad_norm": 0.29393678976163556, "learning_rate": 2.8105303828974095e-05, "loss": 0.5205, "step": 225},
    {"epoch": 0.20536119945479328, "grad_norm": 0.27668804080319737, "learning_rate": 2.8083306615599283e-05, "loss": 0.5596, "step": 226},
    {"epoch": 0.20626987732848706, "grad_norm": 0.2579574523472542, "learning_rate": 2.8061191156796658e-05, "loss": 0.5292, "step": 227},
    {"epoch": 0.20717855520218081, "grad_norm": 0.2965602985642256, "learning_rate": 2.8038957652443217e-05, "loss": 0.5408, "step": 228},
    {"epoch": 0.2080872330758746, "grad_norm": 0.2584149229749058, "learning_rate": 2.801660630348282e-05, "loss": 0.5219, "step": 229},
    {"epoch": 0.20899591094956838, "grad_norm": 0.23617123717584307, "learning_rate": 2.79941373119244e-05, "loss": 0.5091, "step": 230},
    {"epoch": 0.20990458882326216, "grad_norm": 0.29559409833291433, "learning_rate": 2.7971550880840138e-05, "loss": 0.5252, "step": 231},
    {"epoch": 0.21081326669695594, "grad_norm": 0.25262076589345694, "learning_rate": 2.794884721436361e-05, "loss": 0.5322, "step": 232},
    {"epoch": 0.21172194457064972, "grad_norm": 0.2861284017299984, "learning_rate": 2.792602651768795e-05, "loss": 0.519, "step": 233},
    {"epoch": 0.21263062244434347, "grad_norm": 0.2940791172624826, "learning_rate": 2.790308899706401e-05, "loss": 0.5319, "step": 234},
    {"epoch": 0.21353930031803725, "grad_norm": 0.24143923336501463, "learning_rate": 2.7880034859798476e-05, "loss": 0.5161, "step": 235},
    {"epoch": 0.21444797819173103, "grad_norm": 0.2995164890461248, "learning_rate": 2.7856864314251994e-05, "loss": 0.5168, "step": 236},
    {"epoch": 0.2153566560654248, "grad_norm": 0.25742178571208674, "learning_rate": 2.783357756983731e-05, "loss": 0.5534, "step": 237},
    {"epoch": 0.2162653339391186, "grad_norm": 0.29245573697234445, "learning_rate": 2.781017483701735e-05, "loss": 0.5388, "step": 238},
    {"epoch": 0.21717401181281235, "grad_norm": 0.2674112998244982, "learning_rate": 2.7786656327303342e-05, "loss": 0.5605, "step": 239},
    {"epoch": 0.21808268968650613, "grad_norm": 0.263962722421515, "learning_rate": 2.7763022253252882e-05, "loss": 0.5283, "step": 240},
    {"epoch": 0.2189913675601999, "grad_norm": 0.2960952065569361, "learning_rate": 2.7739272828468022e-05, "loss": 0.5346, "step": 241},
    {"epoch": 0.2199000454338937, "grad_norm": 0.2564971928759544, "learning_rate": 2.771540826759335e-05, "loss": 0.5258, "step": 242},
    {"epoch": 0.22080872330758747, "grad_norm": 0.38221953093331257, "learning_rate": 2.769142878631403e-05, "loss": 0.5386, "step": 243},
    {"epoch": 0.22171740118128125, "grad_norm": 0.2567622031991432, "learning_rate": 2.766733460135388e-05, "loss": 0.5267, "step": 244},
    {"epoch": 0.222626079054975, "grad_norm": 0.2810737233839325, "learning_rate": 2.7643125930473375e-05, "loss": 0.5378, "step": 245},
    {"epoch": 0.22353475692866878, "grad_norm": 0.24712524183510226, "learning_rate": 2.7618802992467718e-05, "loss": 0.5129, "step": 246},
    {"epoch": 0.22444343480236256, "grad_norm": 0.2581811828213249, "learning_rate": 2.759436600716484e-05, "loss": 0.5475, "step": 247},
    {"epoch": 0.22535211267605634, "grad_norm": 0.26523091241064817, "learning_rate": 2.756981519542341e-05, "loss": 0.5161, "step": 248},
    {"epoch": 0.22626079054975012, "grad_norm": 0.2747890186827313, "learning_rate": 2.7545150779130858e-05, "loss": 0.557, "step": 249},
    {"epoch": 0.22716946842344388, "grad_norm": 0.28216298108132126, "learning_rate": 2.7520372981201362e-05, "loss": 0.529, "step": 250},
    {"epoch": 0.22807814629713766, "grad_norm": 0.2827987705048485, "learning_rate": 2.7495482025573817e-05, "loss": 0.5326, "step": 251},
    {"epoch": 0.22898682417083144, "grad_norm": 0.2588695610411959, "learning_rate": 2.747047813720983e-05, "loss": 0.5248, "step": 252},
    {"epoch": 0.22989550204452522, "grad_norm": 0.27906004523644146, "learning_rate": 2.7445361542091687e-05, "loss": 0.5424, "step": 253},
    {"epoch": 0.230804179918219, "grad_norm": 0.25979694399888736, "learning_rate": 2.74201324672203e-05, "loss": 0.5427, "step": 254},
    {"epoch": 0.23171285779191278, "grad_norm": 0.3338334113235304, "learning_rate": 2.739479114061316e-05, "loss": 0.5407, "step": 255},
    {"epoch": 0.23262153566560653, "grad_norm": 0.3034660576427874, "learning_rate": 2.7369337791302272e-05, "loss": 0.5381, "step": 256},
    {"epoch": 0.2335302135393003, "grad_norm": 0.2770064979315646, "learning_rate": 2.7343772649332097e-05, "loss": 0.5628, "step": 257},
    {"epoch": 0.2344388914129941, "grad_norm": 0.3235843684101457, "learning_rate": 2.7318095945757465e-05, "loss": 0.5488, "step": 258},
    {"epoch": 0.23534756928668787, "grad_norm": 0.2656277121687186, "learning_rate": 2.729230791264148e-05, "loss": 0.5386, "step": 259},
    {"epoch": 0.23625624716038165, "grad_norm": 0.2629765514542018, "learning_rate": 2.7266408783053437e-05, "loss": 0.5301, "step": 260},
    {"epoch": 0.2371649250340754, "grad_norm": 0.2977558536425956, "learning_rate": 2.72403987910667e-05, "loss": 0.5282, "step": 261},
    {"epoch": 0.2380736029077692, "grad_norm": 0.26390918112160183, "learning_rate": 2.7214278171756602e-05, "loss": 0.5534, "step": 262},
    {"epoch": 0.23898228078146297, "grad_norm": 0.2913114222615873, "learning_rate": 2.7188047161198318e-05, "loss": 0.5296, "step": 263},
    {"epoch": 0.23989095865515675, "grad_norm": 0.2843782894394526, "learning_rate": 2.716170599646471e-05, "loss": 0.5352, "step": 264},
    {"epoch": 0.24079963652885053, "grad_norm": 0.28784903348306884, "learning_rate": 2.7135254915624213e-05, "loss": 0.5135, "step": 265},
    {"epoch": 0.2417083144025443, "grad_norm": 0.282373175191605, "learning_rate": 2.710869415773867e-05, "loss": 0.5368, "step": 266},
    {"epoch": 0.24261699227623806, "grad_norm": 0.27538619157579186, "learning_rate": 2.7082023962861164e-05, "loss": 0.5285, "step": 267},
    {"epoch": 0.24352567014993184, "grad_norm": 0.26568964109400045, "learning_rate": 2.7055244572033877e-05, "loss": 0.5343, "step": 268},
    {"epoch": 0.24443434802362562, "grad_norm": 0.27304231694701525, "learning_rate": 2.702835622728587e-05, "loss": 0.5247, "step": 269},
    {"epoch": 0.2453430258973194, "grad_norm": 0.2712736651714342, "learning_rate": 2.700135917163092e-05, "loss": 0.5438, "step": 270},
    {"epoch": 0.2462517037710132, "grad_norm": 0.27996658794844825, "learning_rate": 2.697425364906534e-05, "loss": 0.5343, "step": 271},
    {"epoch": 0.24716038164470694, "grad_norm": 0.2734492688955939, "learning_rate": 2.694703990456573e-05, "loss": 0.5215, "step": 272},
    {"epoch": 0.24806905951840072, "grad_norm": 0.25090613623072067, "learning_rate": 2.69197181840868e-05, "loss": 0.5077, "step": 273},
    {"epoch": 0.2489777373920945, "grad_norm": 0.26818173165244835, "learning_rate": 2.689228873455914e-05, "loss": 0.5563, "step": 274},
    {"epoch": 0.24988641526578828, "grad_norm": 0.29301009152758006, "learning_rate": 2.6864751803886963e-05, "loss": 0.5164, "step": 275},
    {"epoch": 0.25079509313948206, "grad_norm": 0.27801377651380216, "learning_rate": 2.6837107640945904e-05, "loss": 0.5234, "step": 276},
    {"epoch": 0.2517037710131758, "grad_norm": 0.2757615453146622, "learning_rate": 2.6809356495580747e-05, "loss": 0.5019, "step": 277},
    {"epoch": 0.2526124488868696, "grad_norm": 0.28705078185667204, "learning_rate": 2.6781498618603163e-05, "loss": 0.5428, "step": 278},
    {"epoch": 0.2535211267605634, "grad_norm": 0.27743176091962085, "learning_rate": 2.6753534261789454e-05, "loss": 0.5282, "step": 279},
    {"epoch": 0.25442980463425713, "grad_norm": 0.2781590591093593, "learning_rate": 2.672546367787828e-05, "loss": 0.533, "step": 280},
    {"epoch": 0.25533848250795094, "grad_norm": 0.26269046529463025, "learning_rate": 2.6697287120568364e-05, "loss": 0.534, "step": 281},
    {"epoch": 0.2562471603816447, "grad_norm": 0.2976310747283509, "learning_rate": 2.666900484451621e-05, "loss": 0.5485, "step": 282},
    {"epoch": 0.2571558382553385, "grad_norm": 0.27008030574397085, "learning_rate": 2.664061710533379e-05, "loss": 0.5378, "step": 283},
    {"epoch": 0.25806451612903225, "grad_norm": 0.31570883702046254, "learning_rate": 2.661212415958624e-05, "loss": 0.5487, "step": 284},
    {"epoch": 0.25897319400272606, "grad_norm": 0.2792688424866495, "learning_rate": 2.6583526264789538e-05, "loss": 0.5446, "step": 285},
    {"epoch": 0.2598818718764198, "grad_norm": 0.2777118844456695, "learning_rate": 2.6554823679408195e-05, "loss": 0.5023, "step": 286},
    {"epoch": 0.26079054975011356, "grad_norm": 0.31599907246218734, "learning_rate": 2.6526016662852887e-05, "loss": 0.5239, "step": 287},
    {"epoch": 0.2616992276238074, "grad_norm": 0.26231651760848274, "learning_rate": 2.649710547547813e-05, "loss": 0.5261, "step": 288},
    {"epoch": 0.2626079054975011, "grad_norm": 0.3981071563722695, "learning_rate": 2.6468090378579933e-05, "loss": 0.5549, "step": 289},
    {"epoch": 0.26351658337119493, "grad_norm": 0.2711164984704392, "learning_rate": 2.6438971634393416e-05, "loss": 0.5534, "step": 290},
    {"epoch": 0.2644252612448887, "grad_norm": 0.26684683603216064, "learning_rate": 2.6409749506090456e-05, "loss": 0.5361, "step": 291},
    {"epoch": 0.26533393911858244, "grad_norm": 0.2845466784771231, "learning_rate": 2.6380424257777308e-05, "loss": 0.5339, "step": 292},
    {"epoch": 0.26624261699227625, "grad_norm": 0.2731604690037556, "learning_rate": 2.6350996154492207e-05, "loss": 0.5042, "step": 293},
    {"epoch": 0.26715129486597, "grad_norm": 0.3036904378905144, "learning_rate": 2.6321465462202985e-05, "loss": 0.5345, "step": 294},
    {"epoch": 0.2680599727396638, "grad_norm": 0.24972173802456996, "learning_rate": 2.6291832447804658e-05, "loss": 0.521, "step": 295},
    {"epoch": 0.26896865061335756, "grad_norm": 0.3007524094442608, "learning_rate": 2.6262097379117015e-05, "loss": 0.5585, "step": 296},
    {"epoch": 0.2698773284870513, "grad_norm": 0.2362622844963897, "learning_rate": 2.623226052488221e-05, "loss": 0.5234, "step": 297},
    {"epoch": 0.2707860063607451, "grad_norm": 0.23694975837443996, "learning_rate": 2.620232215476231e-05, "loss": 0.5302, "step": 298},
    {"epoch": 0.2716946842344389, "grad_norm": 0.2549336943473087, "learning_rate": 2.6172282539336883e-05, "loss": 0.5046, "step": 299},
    {"epoch": 0.2726033621081327, "grad_norm": 0.27313340626086924, "learning_rate": 2.6142141950100533e-05, "loss": 0.5341, "step": 300},
    {"epoch": 0.27351203998182644, "grad_norm": 0.24090945244526518, "learning_rate": 2.6111900659460455e-05, "loss": 0.5174, "step": 301},
    {"epoch": 0.2744207178555202, "grad_norm": 0.2577327230433516, "learning_rate": 2.6081558940733974e-05, "loss": 0.5328, "step": 302},
    {"epoch": 0.275329395729214, "grad_norm": 0.2751050941709027, "learning_rate": 2.6051117068146073e-05, "loss": 0.5304, "step": 303},
    {"epoch": 0.27623807360290775, "grad_norm": 0.24804858681864422, "learning_rate": 2.6020575316826912e-05, "loss": 0.5271, "step": 304},
    {"epoch": 0.27714675147660156, "grad_norm": 0.2661161507676517, "learning_rate": 2.598993396280934e-05, "loss": 0.5082, "step": 305},
    {"epoch": 0.2780554293502953, "grad_norm": 0.2604861986147373, "learning_rate": 2.595919328302641e-05, "loss": 0.5237, "step": 306},
    {"epoch": 0.2789641072239891, "grad_norm": 0.26623040665758096, "learning_rate": 2.5928353555308873e-05, "loss": 0.521, "step": 307},
    {"epoch": 0.2798727850976829, "grad_norm": 0.2545017248380541, "learning_rate": 2.5897415058382645e-05, "loss": 0.5244, "step": 308},
    {"epoch": 0.2807814629713766, "grad_norm": 0.2977913139309013, "learning_rate": 2.5866378071866338e-05, "loss": 0.536, "step": 309},
    {"epoch": 0.28169014084507044, "grad_norm": 0.2640290725076467, "learning_rate": 2.5835242876268667e-05, "loss": 0.5169, "step": 310},
    {"epoch": 0.2825988187187642, "grad_norm": 0.272710612040365, "learning_rate": 2.5804009752985975e-05, "loss": 0.5013, "step": 311},
    {"epoch": 0.283507496592458, "grad_norm": 0.25625692372703074, "learning_rate": 2.577267898429966e-05, "loss": 0.5268, "step": 312},
    {"epoch": 0.28441617446615175, "grad_norm": 0.2899485136934706, "learning_rate": 2.574125085337361e-05, "loss": 0.5179, "step": 313},
    {"epoch": 0.2853248523398455, "grad_norm": 0.2719023563096517, "learning_rate": 2.570972564425169e-05, "loss": 0.5258, "step": 314},
    {"epoch": 0.2862335302135393, "grad_norm": 0.27836053706475294, "learning_rate": 2.5678103641855122e-05, "loss": 0.5257, "step": 315},
    {"epoch": 0.28714220808723306, "grad_norm": 0.26623698724894534, "learning_rate": 2.564638513197995e-05, "loss": 0.5325, "step": 316},
    {"epoch": 0.2880508859609269, "grad_norm": 0.27117077475624796, "learning_rate": 2.5614570401294434e-05, "loss": 0.5123, "step": 317},
    {"epoch": 0.2889595638346206, "grad_norm": 0.27926276040984455, "learning_rate": 2.5582659737336468e-05, "loss": 0.5344, "step": 318},
    {"epoch": 0.2898682417083144, "grad_norm": 0.2693820558084933, "learning_rate": 2.555065342851098e-05, "loss": 0.5381, "step": 319},
    {"epoch": 0.2907769195820082, "grad_norm": 0.24510109791420934, "learning_rate": 2.5518551764087326e-05, "loss": 0.5213, "step": 320},
    {"epoch": 0.29168559745570194, "grad_norm": 0.22664216238654272, "learning_rate": 2.5486355034196686e-05, "loss": 0.532, "step": 321},
    {"epoch": 0.29259427532939575, "grad_norm": 0.26225099787887585, "learning_rate": 2.5454063529829405e-05, "loss": 0.5343, "step": 322},
    {"epoch": 0.2935029532030895, "grad_norm": 0.27572506715780165, "learning_rate": 2.5421677542832406e-05, "loss": 0.5238, "step": 323},
    {"epoch": 0.29441163107678325, "grad_norm": 0.2400899275047973, "learning_rate": 2.538919736590654e-05, "loss": 0.517, "step": 324},
    {"epoch": 0.29532030895047706, "grad_norm": 0.249003004458264, "learning_rate": 2.5356623292603923e-05, "loss": 0.5425, "step": 325},
    {"epoch": 0.2962289868241708, "grad_norm": 0.27495906745966536, "learning_rate": 2.53239556173253e-05, "loss": 0.5268, "step": 326},
    {"epoch": 0.2971376646978646, "grad_norm": 0.2932613052572052, "learning_rate": 2.529119463531739e-05, "loss": 0.5304, "step": 327},
    {"epoch": 0.2980463425715584, "grad_norm": 0.4467908726632037, "learning_rate": 2.525834064267019e-05, "loss": 0.5188, "step": 328},
    {"epoch": 0.2989550204452522, "grad_norm": 0.2563779834024989, "learning_rate": 2.5225393936314328e-05, "loss": 0.5217, "step": 329},
    {"epoch": 0.29986369831894594, "grad_norm": 0.2828739499393757, "learning_rate": 2.519235481401837e-05, "loss": 0.5417, "step": 330},
    {"epoch": 0.3007723761926397, "grad_norm": 0.29573580299114555, "learning_rate": 2.5159223574386117e-05, "loss": 0.5336, "step": 331},
    {"epoch": 0.3016810540663335, "grad_norm": 0.2612869803912411, "learning_rate": 2.512600051685392e-05, "loss": 0.5066, "step": 332},
    {"epoch": 0.30258973194002725, "grad_norm": 0.24637368673414986, "learning_rate": 2.509268594168797e-05, "loss": 0.5121, "step": 333},
    {"epoch": 0.30349840981372106, "grad_norm": 0.2593406096692407, "learning_rate": 2.5059280149981598e-05, "loss": 0.5173, "step": 334},
    {"epoch": 0.3044070876874148, "grad_norm": 0.28821198005365534, "learning_rate": 2.5025783443652504e-05, "loss": 0.5307, "step": 335},
    {"epoch": 0.30531576556110857, "grad_norm": 0.24808902238741312, "learning_rate": 2.49921961254401e-05, "loss": 0.5319, "step": 336},
    {"epoch": 0.3062244434348024, "grad_norm": 0.23581257303037423, "learning_rate": 2.4958518498902725e-05, "loss": 0.5056, "step": 337},
    {"epoch": 0.3071331213084961, "grad_norm": 0.30488588123260907, "learning_rate": 2.4924750868414907e-05, "loss": 0.5264, "step": 338},
    {"epoch": 0.30804179918218993, "grad_norm": 0.2332850709261921, "learning_rate": 2.489089353916463e-05, "loss": 0.5085, "step": 339},
    {"epoch": 0.3089504770558837, "grad_norm": 0.2556648236725686, "learning_rate": 2.4856946817150555e-05, "loss": 0.5317, "step": 340},
    {"epoch": 0.30985915492957744, "grad_norm": 0.25567477216423434, "learning_rate": 2.482291100917928e-05, "loss": 0.5161, "step": 341},
    {"epoch": 0.31076783280327125, "grad_norm": 0.27072507173981075, "learning_rate": 2.478878642286253e-05, "loss": 0.5321, "step": 342},
    {"epoch": 0.311676510676965, "grad_norm": 0.2605115373745453, "learning_rate": 2.4754573366614418e-05, "loss": 0.5256, "step": 343},
    {"epoch": 0.3125851885506588, "grad_norm": 0.24858959605676012, "learning_rate": 2.4720272149648632e-05, "loss": 0.5386, "step": 344},
    {"epoch": 0.31349386642435256, "grad_norm": 0.24688132317645303, "learning_rate": 2.4685883081975646e-05, "loss": 0.4936, "step": 345},
    {"epoch": 0.3144025442980463, "grad_norm": 0.24959974360358334, "learning_rate": 2.465140647439991e-05, "loss": 0.5247, "step": 346},
    {"epoch": 0.3153112221717401, "grad_norm": 0.25991911172601784, "learning_rate": 2.4616842638517064e-05, "loss": 0.5362, "step": 347},
    {"epoch": 0.3162199000454339, "grad_norm": 0.23071672077330374, "learning_rate": 2.4582191886711093e-05, "loss": 0.5209, "step": 348},
    {"epoch": 0.3171285779191277, "grad_norm": 0.2929778093811859, "learning_rate": 2.4547454532151535e-05, "loss": 0.5096, "step": 349},
    {"epoch": 0.31803725579282144, "grad_norm": 0.2389578721549624, "learning_rate": 2.4512630888790623e-05, "loss": 0.5139, "step": 350},
    {"epoch": 0.31894593366651525, "grad_norm": 0.26314034132893005, "learning_rate": 2.447772127136046e-05, "loss": 0.5227, "step": 351},
    {"epoch": 0.319854611540209, "grad_norm": 0.2875398798502879, "learning_rate": 2.4442725995370172e-05, "loss": 0.5293, "step": 352},
    {"epoch": 0.32076328941390275, "grad_norm": 0.2419783839135977, "learning_rate": 2.4407645377103056e-05, "loss": 0.5063, "step": 353},
    {"epoch": 0.32167196728759656, "grad_norm": 0.247103751631837, "learning_rate": 2.4372479733613728e-05, "loss": 0.5351, "step": 354},
    {"epoch": 0.3225806451612903, "grad_norm": 0.2659228516335401, "learning_rate": 2.4337229382725245e-05, "loss": 0.5115, "step": 355},
    {"epoch": 0.3234893230349841, "grad_norm": 0.2831531321069596, "learning_rate": 2.430189464302625e-05, "loss": 0.5331, "step": 356},
    {"epoch": 0.3243980009086779, "grad_norm": 0.24482732390316203, "learning_rate": 2.4266475833868074e-05, "loss": 0.5233, "step": 357},
    {"epoch": 0.3253066787823716, "grad_norm": 0.23977791537507986, "learning_rate": 2.4230973275361858e-05, "loss": 0.5355, "step": 358},
    {"epoch": 0.32621535665606544, "grad_norm": 0.24718631252268783, "learning_rate": 2.4195387288375667e-05, "loss": 0.526, "step": 359},
    {"epoch": 0.3271240345297592, "grad_norm": 0.23087062790002885, "learning_rate": 2.4159718194531573e-05, "loss": 0.534, "step": 360},
    {"epoch": 0.328032712403453, "grad_norm": 0.24621497452205773, "learning_rate": 2.4123966316202768e-05, "loss": 0.5249, "step": 361},
    {"epoch": 0.32894139027714675, "grad_norm": 0.23192899573599338, "learning_rate": 2.4088131976510633e-05, "loss": 0.5029, "step": 362},
    {"epoch": 0.3298500681508405, "grad_norm": 0.26992995982330875, "learning_rate": 2.4052215499321837e-05, "loss": 0.537, "step": 363},
    {"epoch": 0.3307587460245343, "grad_norm": 0.24505808341102853, "learning_rate": 2.4016217209245377e-05, "loss": 0.5144, "step": 364},
    {"epoch": 0.33166742389822806, "grad_norm": 0.2593264924319726, "learning_rate": 2.398013743162969e-05, "loss": 0.5136, "step": 365},
    {"epoch": 0.3325761017719219, "grad_norm": 0.26942263570642233, "learning_rate": 2.3943976492559675e-05, "loss": 0.526, "step": 366},
    {"epoch": 0.3334847796456156, "grad_norm": 0.25903401709622187,
|
"learning_rate": 2.3907734718853756e-05, |
|
"loss": 0.5274, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.3343934575193094, |
|
"grad_norm": 0.2629598083863886, |
|
"learning_rate": 2.3871412438060933e-05, |
|
"loss": 0.5229, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.3353021353930032, |
|
"grad_norm": 0.2661133422715312, |
|
"learning_rate": 2.3835009978457833e-05, |
|
"loss": 0.5508, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.33621081326669694, |
|
"grad_norm": 0.266262476527216, |
|
"learning_rate": 2.3798527669045714e-05, |
|
"loss": 0.4896, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.33711949114039075, |
|
"grad_norm": 0.2552822951159819, |
|
"learning_rate": 2.3761965839547515e-05, |
|
"loss": 0.5043, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.3380281690140845, |
|
"grad_norm": 0.26247510465009727, |
|
"learning_rate": 2.3725324820404864e-05, |
|
"loss": 0.5212, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.3389368468877783, |
|
"grad_norm": 0.26377209436914206, |
|
"learning_rate": 2.3688604942775104e-05, |
|
"loss": 0.5003, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.33984552476147206, |
|
"grad_norm": 0.2866216584456117, |
|
"learning_rate": 2.3651806538528277e-05, |
|
"loss": 0.5394, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.3407542026351658, |
|
"grad_norm": 0.321213963036692, |
|
"learning_rate": 2.3614929940244155e-05, |
|
"loss": 0.5228, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.3416628805088596, |
|
"grad_norm": 0.27994833135501873, |
|
"learning_rate": 2.3577975481209214e-05, |
|
"loss": 0.5447, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.3425715583825534, |
|
"grad_norm": 0.26280605731029355, |
|
"learning_rate": 2.3540943495413624e-05, |
|
"loss": 0.5192, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.3434802362562472, |
|
"grad_norm": 0.28019047768430555, |
|
"learning_rate": 2.3503834317548227e-05, |
|
"loss": 0.5033, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.34438891412994094, |
|
"grad_norm": 0.2507309672329866, |
|
"learning_rate": 2.3466648283001542e-05, |
|
"loss": 0.5396, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.3452975920036347, |
|
"grad_norm": 0.29912678040761936, |
|
"learning_rate": 2.3429385727856674e-05, |
|
"loss": 0.5248, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.3462062698773285, |
|
"grad_norm": 0.2637675471489468, |
|
"learning_rate": 2.3392046988888345e-05, |
|
"loss": 0.5139, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.34711494775102225, |
|
"grad_norm": 0.25935799301918555, |
|
"learning_rate": 2.33546324035598e-05, |
|
"loss": 0.5367, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.34802362562471606, |
|
"grad_norm": 0.3066244700426474, |
|
"learning_rate": 2.331714231001978e-05, |
|
"loss": 0.5009, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.3489323034984098, |
|
"grad_norm": 0.2992183911705901, |
|
"learning_rate": 2.3279577047099457e-05, |
|
"loss": 0.512, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.34984098137210357, |
|
"grad_norm": 0.2695589550918868, |
|
"learning_rate": 2.3241936954309382e-05, |
|
"loss": 0.5311, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.3507496592457974, |
|
"grad_norm": 0.23292795209077574, |
|
"learning_rate": 2.320422237183641e-05, |
|
"loss": 0.5101, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.3516583371194911, |
|
"grad_norm": 0.239637841319824, |
|
"learning_rate": 2.3166433640540602e-05, |
|
"loss": 0.5011, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.35256701499318494, |
|
"grad_norm": 0.29108219617293946, |
|
"learning_rate": 2.3128571101952207e-05, |
|
"loss": 0.5117, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.3534756928668787, |
|
"grad_norm": 0.259703792122574, |
|
"learning_rate": 2.3090635098268505e-05, |
|
"loss": 0.5, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.35438437074057244, |
|
"grad_norm": 0.2379621716490283, |
|
"learning_rate": 2.305262597235076e-05, |
|
"loss": 0.5228, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.35529304861426625, |
|
"grad_norm": 0.26001892708811136, |
|
"learning_rate": 2.3014544067721096e-05, |
|
"loss": 0.5176, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.35620172648796, |
|
"grad_norm": 0.24496114487545176, |
|
"learning_rate": 2.297638972855942e-05, |
|
"loss": 0.5331, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.3571104043616538, |
|
"grad_norm": 0.2518685059909613, |
|
"learning_rate": 2.293816329970027e-05, |
|
"loss": 0.5063, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.35801908223534756, |
|
"grad_norm": 0.250752775705928, |
|
"learning_rate": 2.289986512662974e-05, |
|
"loss": 0.5397, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.35892776010904137, |
|
"grad_norm": 0.2701120016725626, |
|
"learning_rate": 2.2861495555482337e-05, |
|
"loss": 0.5233, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.3598364379827351, |
|
"grad_norm": 0.2638105659756278, |
|
"learning_rate": 2.282305493303785e-05, |
|
"loss": 0.5294, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.3607451158564289, |
|
"grad_norm": 0.24041324098221906, |
|
"learning_rate": 2.2784543606718227e-05, |
|
"loss": 0.518, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.3616537937301227, |
|
"grad_norm": 0.259602640344262, |
|
"learning_rate": 2.274596192458443e-05, |
|
"loss": 0.5234, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.36256247160381644, |
|
"grad_norm": 0.23222802536767737, |
|
"learning_rate": 2.2707310235333284e-05, |
|
"loss": 0.4881, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.36347114947751025, |
|
"grad_norm": 0.2443445481061016, |
|
"learning_rate": 2.266858888829433e-05, |
|
"loss": 0.5235, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.364379827351204, |
|
"grad_norm": 0.26006596580077374, |
|
"learning_rate": 2.2629798233426677e-05, |
|
"loss": 0.5028, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.36528850522489775, |
|
"grad_norm": 0.22354431116478168, |
|
"learning_rate": 2.259093862131582e-05, |
|
"loss": 0.5173, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.36619718309859156, |
|
"grad_norm": 0.22101715081783416, |
|
"learning_rate": 2.2552010403170486e-05, |
|
"loss": 0.5164, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.3671058609722853, |
|
"grad_norm": 0.25855149226438745, |
|
"learning_rate": 2.2513013930819452e-05, |
|
"loss": 0.524, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.3680145388459791, |
|
"grad_norm": 0.2500646351802012, |
|
"learning_rate": 2.247394955670838e-05, |
|
"loss": 0.5099, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.3689232167196729, |
|
"grad_norm": 0.22785523897152013, |
|
"learning_rate": 2.243481763389661e-05, |
|
"loss": 0.5351, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.36983189459336663, |
|
"grad_norm": 0.2742012018148663, |
|
"learning_rate": 2.239561851605397e-05, |
|
"loss": 0.5294, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.37074057246706044, |
|
"grad_norm": 0.2917409445353074, |
|
"learning_rate": 2.2356352557457624e-05, |
|
"loss": 0.5245, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.3716492503407542, |
|
"grad_norm": 0.24786694428623812, |
|
"learning_rate": 2.2317020112988792e-05, |
|
"loss": 0.5141, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.372557928214448, |
|
"grad_norm": 0.23835087667090923, |
|
"learning_rate": 2.2277621538129612e-05, |
|
"loss": 0.4942, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.37346660608814175, |
|
"grad_norm": 0.24847073099295497, |
|
"learning_rate": 2.2238157188959893e-05, |
|
"loss": 0.515, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.3743752839618355, |
|
"grad_norm": 0.21988501584629094, |
|
"learning_rate": 2.2198627422153908e-05, |
|
"loss": 0.5312, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.3752839618355293, |
|
"grad_norm": 0.24762672440554007, |
|
"learning_rate": 2.2159032594977165e-05, |
|
"loss": 0.5197, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.37619263970922306, |
|
"grad_norm": 0.23443065509690622, |
|
"learning_rate": 2.211937306528318e-05, |
|
"loss": 0.5075, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.3771013175829169, |
|
"grad_norm": 0.27862711744360735, |
|
"learning_rate": 2.2079649191510248e-05, |
|
"loss": 0.4837, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.3780099954566106, |
|
"grad_norm": 0.23994539916874946, |
|
"learning_rate": 2.203986133267818e-05, |
|
"loss": 0.5129, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.37891867333030443, |
|
"grad_norm": 0.2321097118020647, |
|
"learning_rate": 2.2000009848385107e-05, |
|
"loss": 0.5113, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.3798273512039982, |
|
"grad_norm": 0.24702140514455406, |
|
"learning_rate": 2.196009509880417e-05, |
|
"loss": 0.5276, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.38073602907769194, |
|
"grad_norm": 0.31011337504149433, |
|
"learning_rate": 2.1920117444680317e-05, |
|
"loss": 0.5235, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.38164470695138575, |
|
"grad_norm": 0.25024867689118563, |
|
"learning_rate": 2.1880077247327008e-05, |
|
"loss": 0.5319, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.3825533848250795, |
|
"grad_norm": 0.2657215152096355, |
|
"learning_rate": 2.1839974868622956e-05, |
|
"loss": 0.5178, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.3834620626987733, |
|
"grad_norm": 0.2581802279830013, |
|
"learning_rate": 2.1799810671008887e-05, |
|
"loss": 0.5078, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.38437074057246706, |
|
"grad_norm": 0.2528887013846824, |
|
"learning_rate": 2.17595850174842e-05, |
|
"loss": 0.5234, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.3852794184461608, |
|
"grad_norm": 0.238989778351192, |
|
"learning_rate": 2.1719298271603766e-05, |
|
"loss": 0.5121, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.3861880963198546, |
|
"grad_norm": 0.2643788344384685, |
|
"learning_rate": 2.1678950797474578e-05, |
|
"loss": 0.4956, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.3870967741935484, |
|
"grad_norm": 0.24554204369119242, |
|
"learning_rate": 2.1638542959752485e-05, |
|
"loss": 0.5413, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.3880054520672422, |
|
"grad_norm": 0.23118345256737963, |
|
"learning_rate": 2.1598075123638903e-05, |
|
"loss": 0.5137, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.38891412994093594, |
|
"grad_norm": 0.25400418452076406, |
|
"learning_rate": 2.1557547654877506e-05, |
|
"loss": 0.508, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.3898228078146297, |
|
"grad_norm": 0.22797693534084318, |
|
"learning_rate": 2.1516960919750914e-05, |
|
"loss": 0.5022, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.3907314856883235, |
|
"grad_norm": 0.23059282438934506, |
|
"learning_rate": 2.1476315285077393e-05, |
|
"loss": 0.4997, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.39164016356201725, |
|
"grad_norm": 0.24684953496844692, |
|
"learning_rate": 2.1435611118207546e-05, |
|
"loss": 0.524, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.39254884143571106, |
|
"grad_norm": 0.24358371474217685, |
|
"learning_rate": 2.1394848787020963e-05, |
|
"loss": 0.4959, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.3934575193094048, |
|
"grad_norm": 0.24009597119673534, |
|
"learning_rate": 2.1354028659922938e-05, |
|
"loss": 0.5065, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.39436619718309857, |
|
"grad_norm": 0.21258467054067773, |
|
"learning_rate": 2.13131511058411e-05, |
|
"loss": 0.5027, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.3952748750567924, |
|
"grad_norm": 0.2332547982597397, |
|
"learning_rate": 2.1272216494222108e-05, |
|
"loss": 0.521, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.3961835529304861, |
|
"grad_norm": 0.26527023398234467, |
|
"learning_rate": 2.12312251950283e-05, |
|
"loss": 0.51, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.39709223080417994, |
|
"grad_norm": 0.24686604477814575, |
|
"learning_rate": 2.1190177578734336e-05, |
|
"loss": 0.5217, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.3980009086778737, |
|
"grad_norm": 0.23200522771280188, |
|
"learning_rate": 2.114907401632389e-05, |
|
"loss": 0.5182, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.3989095865515675, |
|
"grad_norm": 0.22649180511207603, |
|
"learning_rate": 2.1107914879286237e-05, |
|
"loss": 0.5103, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.39981826442526125, |
|
"grad_norm": 0.2885194149892351, |
|
"learning_rate": 2.1066700539612964e-05, |
|
"loss": 0.5293, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.400726942298955, |
|
"grad_norm": 0.22579048097840532, |
|
"learning_rate": 2.1025431369794546e-05, |
|
"loss": 0.5257, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.4016356201726488, |
|
"grad_norm": 0.27513365175752, |
|
"learning_rate": 2.0984107742817016e-05, |
|
"loss": 0.5401, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.40254429804634256, |
|
"grad_norm": 0.2884279516810869, |
|
"learning_rate": 2.0942730032158586e-05, |
|
"loss": 0.5227, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.40345297592003637, |
|
"grad_norm": 0.276114207969109, |
|
"learning_rate": 2.0901298611786274e-05, |
|
"loss": 0.5297, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.4043616537937301, |
|
"grad_norm": 0.254987448745979, |
|
"learning_rate": 2.0859813856152513e-05, |
|
"loss": 0.508, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.4052703316674239, |
|
"grad_norm": 0.2700548502662214, |
|
"learning_rate": 2.081827614019177e-05, |
|
"loss": 0.5056, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.4061790095411177, |
|
"grad_norm": 0.26239233598339107, |
|
"learning_rate": 2.077668583931718e-05, |
|
"loss": 0.5185, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.40708768741481144, |
|
"grad_norm": 0.21565404347135067, |
|
"learning_rate": 2.0735043329417115e-05, |
|
"loss": 0.4941, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.40799636528850525, |
|
"grad_norm": 0.2728801118505973, |
|
"learning_rate": 2.0693348986851817e-05, |
|
"loss": 0.512, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.408905043162199, |
|
"grad_norm": 0.2754327035303729, |
|
"learning_rate": 2.0651603188449976e-05, |
|
"loss": 0.5139, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.40981372103589275, |
|
"grad_norm": 0.2655235441029784, |
|
"learning_rate": 2.0609806311505345e-05, |
|
"loss": 0.4772, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.41072239890958656, |
|
"grad_norm": 0.2440498900832255, |
|
"learning_rate": 2.056795873377331e-05, |
|
"loss": 0.4951, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.4116310767832803, |
|
"grad_norm": 0.2571323059977, |
|
"learning_rate": 2.0526060833467495e-05, |
|
"loss": 0.525, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.4125397546569741, |
|
"grad_norm": 0.24645495086607, |
|
"learning_rate": 2.0484112989256327e-05, |
|
"loss": 0.5259, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.4134484325306679, |
|
"grad_norm": 0.262434196340489, |
|
"learning_rate": 2.0442115580259615e-05, |
|
"loss": 0.5268, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.41435711040436163, |
|
"grad_norm": 0.24807419069104444, |
|
"learning_rate": 2.0400068986045142e-05, |
|
"loss": 0.5177, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.41526578827805544, |
|
"grad_norm": 0.23338885931168596, |
|
"learning_rate": 2.035797358662521e-05, |
|
"loss": 0.5211, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.4161744661517492, |
|
"grad_norm": 0.23650378253272059, |
|
"learning_rate": 2.0315829762453223e-05, |
|
"loss": 0.4978, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.417083144025443, |
|
"grad_norm": 0.2372961976074164, |
|
"learning_rate": 2.0273637894420236e-05, |
|
"loss": 0.5214, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.41799182189913675, |
|
"grad_norm": 0.2677513909886547, |
|
"learning_rate": 2.023139836385153e-05, |
|
"loss": 0.5357, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.41890049977283056, |
|
"grad_norm": 0.23710027013958848, |
|
"learning_rate": 2.0189111552503142e-05, |
|
"loss": 0.5196, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.4198091776465243, |
|
"grad_norm": 0.21399195686356814, |
|
"learning_rate": 2.014677784255844e-05, |
|
"loss": 0.5083, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.42071785552021806, |
|
"grad_norm": 0.24893610234658364, |
|
"learning_rate": 2.0104397616624646e-05, |
|
"loss": 0.5075, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.4216265333939119, |
|
"grad_norm": 0.23670544045308084, |
|
"learning_rate": 2.006197125772939e-05, |
|
"loss": 0.5059, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.4225352112676056, |
|
"grad_norm": 0.26452779453858905, |
|
"learning_rate": 2.0019499149317252e-05, |
|
"loss": 0.5156, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.42344388914129943, |
|
"grad_norm": 0.2766916466032981, |
|
"learning_rate": 1.997698167524628e-05, |
|
"loss": 0.5102, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.4243525670149932, |
|
"grad_norm": 0.23222477899271163, |
|
"learning_rate": 1.9934419219784544e-05, |
|
"loss": 0.5002, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.42526124488868694, |
|
"grad_norm": 0.311260792933651, |
|
"learning_rate": 1.9891812167606645e-05, |
|
"loss": 0.5282, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.42616992276238075, |
|
"grad_norm": 0.3199547422816849, |
|
"learning_rate": 1.984916090379024e-05, |
|
"loss": 0.5002, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.4270786006360745, |
|
"grad_norm": 0.22312839762188058, |
|
"learning_rate": 1.980646581381258e-05, |
|
"loss": 0.5219, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.4279872785097683, |
|
"grad_norm": 0.2599655899356632, |
|
"learning_rate": 1.976372728354699e-05, |
|
"loss": 0.5177, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.42889595638346206, |
|
"grad_norm": 0.23107652252431182, |
|
"learning_rate": 1.972094569925942e-05, |
|
"loss": 0.5435, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.4298046342571558, |
|
"grad_norm": 0.24171392550647774, |
|
"learning_rate": 1.967812144760493e-05, |
|
"loss": 0.5207, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.4307133121308496, |
|
"grad_norm": 0.2345576421072366, |
|
"learning_rate": 1.963525491562421e-05, |
|
"loss": 0.5438, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.4316219900045434, |
|
"grad_norm": 0.2658024201064496, |
|
"learning_rate": 1.959234649074007e-05, |
|
"loss": 0.5297, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.4325306678782372, |
|
"grad_norm": 0.250262769604315, |
|
"learning_rate": 1.954939656075394e-05, |
|
"loss": 0.517, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.43343934575193094, |
|
"grad_norm": 0.25500108379604414, |
|
"learning_rate": 1.950640551384237e-05, |
|
"loss": 0.5094, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.4343480236256247, |
|
"grad_norm": 0.28529434407052306, |
|
"learning_rate": 1.9463373738553523e-05, |
|
"loss": 0.5276, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.4352567014993185, |
|
"grad_norm": 0.23120960399287802, |
|
"learning_rate": 1.9420301623803666e-05, |
|
"loss": 0.538, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.43616537937301225, |
|
"grad_norm": 0.25050816340583165, |
|
"learning_rate": 1.9377189558873634e-05, |
|
"loss": 0.5495, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.43707405724670606, |
|
"grad_norm": 0.22534575878035212, |
|
"learning_rate": 1.9334037933405337e-05, |
|
"loss": 0.5186, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.4379827351203998, |
|
"grad_norm": 0.22295075494230848, |
|
"learning_rate": 1.9290847137398228e-05, |
|
"loss": 0.5165, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.4388914129940936, |
|
"grad_norm": 0.23579161097603182, |
|
"learning_rate": 1.924761756120578e-05, |
|
"loss": 0.4976, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.4398000908677874, |
|
"grad_norm": 0.21832007686471547, |
|
"learning_rate": 1.9204349595531954e-05, |
|
"loss": 0.5218, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.4407087687414811, |
|
"grad_norm": 0.25708298869793, |
|
"learning_rate": 1.916104363142767e-05, |
|
"loss": 0.5062, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.44161744661517494, |
|
"grad_norm": 0.2268902295276673, |
|
"learning_rate": 1.911770006028728e-05, |
|
"loss": 0.5143, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.4425261244888687, |
|
"grad_norm": 0.24778267034905432, |
|
"learning_rate": 1.907431927384501e-05, |
|
"loss": 0.5069, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.4434348023625625, |
|
"grad_norm": 0.23357617111003176, |
|
"learning_rate": 1.9030901664171457e-05, |
|
"loss": 0.5085, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.44434348023625625, |
|
"grad_norm": 0.239662748648008, |
|
"learning_rate": 1.8987447623669992e-05, |
|
"loss": 0.5222, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.44525215810995, |
|
"grad_norm": 0.25708866983380263, |
|
"learning_rate": 1.8943957545073273e-05, |
|
"loss": 0.5308, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.4461608359836438, |
|
"grad_norm": 0.21783878351194902, |
|
"learning_rate": 1.8900431821439644e-05, |
|
"loss": 0.5251, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.44706951385733756, |
|
"grad_norm": 0.29948916904218814, |
|
"learning_rate": 1.8856870846149618e-05, |
|
"loss": 0.4985, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.44797819173103137, |
|
"grad_norm": 0.23300153547188593, |
|
"learning_rate": 1.8813275012902307e-05, |
|
"loss": 0.5163, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.4488868696047251, |
|
"grad_norm": 0.24619621895860472, |
|
"learning_rate": 1.8769644715711854e-05, |
|
"loss": 0.5106, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.4497955474784189, |
|
"grad_norm": 0.22818266306630752, |
|
"learning_rate": 1.8725980348903898e-05, |
|
"loss": 0.514, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4507042253521127, |
|
"grad_norm": 0.22392621802844048, |
|
"learning_rate": 1.8682282307111988e-05, |
|
"loss": 0.5147, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.45161290322580644, |
|
"grad_norm": 0.22400650722344845, |
|
"learning_rate": 1.8638550985274025e-05, |
|
"loss": 0.5283, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.45252158109950025, |
|
"grad_norm": 0.24057049058886684, |
|
"learning_rate": 1.8594786778628686e-05, |
|
"loss": 0.5105, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.453430258973194, |
|
"grad_norm": 0.23834314235207676, |
|
"learning_rate": 1.8550990082711868e-05, |
|
"loss": 0.5142, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.45433893684688775, |
|
"grad_norm": 0.22159251074212913, |
|
"learning_rate": 1.850716129335309e-05, |
|
"loss": 0.5037, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.45524761472058156, |
|
"grad_norm": 0.21883306577809505, |
|
"learning_rate": 1.8463300806671936e-05, |
|
"loss": 0.5243, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.4561562925942753, |
|
"grad_norm": 0.25080121721482435, |
|
"learning_rate": 1.8419409019074474e-05, |
|
"loss": 0.5167, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.4570649704679691, |
|
"grad_norm": 0.23602150357150198, |
|
"learning_rate": 1.837548632724964e-05, |
|
"loss": 0.5331, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.4579736483416629, |
|
"grad_norm": 0.22751790967544427, |
|
"learning_rate": 1.8331533128165713e-05, |
|
"loss": 0.5092, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.45888232621535663, |
|
"grad_norm": 0.21166065633290324, |
|
"learning_rate": 1.8287549819066665e-05, |
|
"loss": 0.5291, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.45979100408905044, |
|
"grad_norm": 0.21303196091882118, |
|
"learning_rate": 1.824353679746861e-05, |
|
"loss": 0.5109, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.4606996819627442, |
|
"grad_norm": 0.2520169158737603, |
|
"learning_rate": 1.8199494461156203e-05, |
|
"loss": 0.5209, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.461608359836438, |
|
"grad_norm": 0.23204211684465506, |
|
"learning_rate": 1.815542320817904e-05, |
|
"loss": 0.5114, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.46251703771013175, |
|
"grad_norm": 0.22206027132403938, |
|
"learning_rate": 1.8111323436848057e-05, |
|
"loss": 0.5101, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.46342571558382556, |
|
"grad_norm": 0.2197827409132378, |
|
"learning_rate": 1.8067195545731942e-05, |
|
"loss": 0.5085, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.4643343934575193, |
|
"grad_norm": 0.22001178565277088, |
|
"learning_rate": 1.802303993365353e-05, |
|
"loss": 0.4998, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.46524307133121307, |
|
"grad_norm": 0.24609354561612468, |
|
"learning_rate": 1.7978856999686182e-05, |
|
"loss": 0.5178, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.4661517492049069, |
|
"grad_norm": 0.22050322371740638, |
|
"learning_rate": 1.7934647143150202e-05, |
|
"loss": 0.5227, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.4670604270786006, |
|
"grad_norm": 0.21073901613729007, |
|
"learning_rate": 1.7890410763609212e-05, |
|
"loss": 0.5006, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.46796910495229443, |
|
"grad_norm": 0.24141869359808918, |
|
"learning_rate": 1.784614826086655e-05, |
|
"loss": 0.5014, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.4688777828259882, |
|
"grad_norm": 0.2279765094681949, |
|
"learning_rate": 1.780186003496164e-05, |
|
"loss": 0.5133, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.46978646069968194, |
|
"grad_norm": 0.23855167092742002, |
|
"learning_rate": 1.775754648616642e-05, |
|
"loss": 0.5135, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.47069513857337575, |
|
"grad_norm": 0.21569471812715105, |
|
"learning_rate": 1.771320801498165e-05, |
|
"loss": 0.5272, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.4716038164470695, |
|
"grad_norm": 0.23601732409769477, |
|
"learning_rate": 1.7668845022133366e-05, |
|
"loss": 0.5359, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.4725124943207633, |
|
"grad_norm": 0.2421070076653461, |
|
"learning_rate": 1.7624457908569226e-05, |
|
"loss": 0.5151, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.47342117219445706, |
|
"grad_norm": 0.21994200464509472, |
|
"learning_rate": 1.7580047075454877e-05, |
|
"loss": 0.5257, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.4743298500681508, |
|
"grad_norm": 0.21413213779222634, |
|
"learning_rate": 1.753561292417035e-05, |
|
"loss": 0.5139, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.4752385279418446, |
|
"grad_norm": 0.2273069993329325, |
|
"learning_rate": 1.7491155856306406e-05, |
|
"loss": 0.5195, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.4761472058155384, |
|
"grad_norm": 0.22197392010344422, |
|
"learning_rate": 1.7446676273660954e-05, |
|
"loss": 0.4834, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.4770558836892322, |
|
"grad_norm": 0.21867064721147128, |
|
"learning_rate": 1.7402174578235356e-05, |
|
"loss": 0.5227, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.47796456156292594, |
|
"grad_norm": 0.2237126581240893, |
|
"learning_rate": 1.7357651172230852e-05, |
|
"loss": 0.5375, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.4788732394366197, |
|
"grad_norm": 0.23009669061440477, |
|
"learning_rate": 1.731310645804489e-05, |
|
"loss": 0.5411, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.4797819173103135, |
|
"grad_norm": 0.21945124564589844, |
|
"learning_rate": 1.7268540838267493e-05, |
|
"loss": 0.4903, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.48069059518400725, |
|
"grad_norm": 0.21735802469423854, |
|
"learning_rate": 1.722395471567763e-05, |
|
"loss": 0.526, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.48159927305770106, |
|
"grad_norm": 0.2259120283462735, |
|
"learning_rate": 1.7179348493239582e-05, |
|
"loss": 0.5099, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4825079509313948, |
|
"grad_norm": 0.2236406553388163, |
|
"learning_rate": 1.713472257409928e-05, |
|
"loss": 0.5035, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.4834166288050886, |
|
"grad_norm": 0.2203648050330253, |
|
"learning_rate": 1.7090077361580667e-05, |
|
"loss": 0.5242, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.4843253066787824, |
|
"grad_norm": 0.21577849058700352, |
|
"learning_rate": 1.7045413259182077e-05, |
|
"loss": 0.5093, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.4852339845524761, |
|
"grad_norm": 0.26474820424051165, |
|
"learning_rate": 1.7000730670572542e-05, |
|
"loss": 0.5367, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.48614266242616994, |
|
"grad_norm": 0.21495788299510912, |
|
"learning_rate": 1.695602999958819e-05, |
|
"loss": 0.5144, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.4870513402998637, |
|
"grad_norm": 0.22089173778431492, |
|
"learning_rate": 1.6911311650228574e-05, |
|
"loss": 0.4924, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.4879600181735575, |
|
"grad_norm": 0.2276117083165815, |
|
"learning_rate": 1.6866576026653006e-05, |
|
"loss": 0.5011, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.48886869604725125, |
|
"grad_norm": 0.23006884025462068, |
|
"learning_rate": 1.682182353317695e-05, |
|
"loss": 0.5149, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.489777373920945, |
|
"grad_norm": 0.23790427840650574, |
|
"learning_rate": 1.67770545742683e-05, |
|
"loss": 0.5215, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.4906860517946388, |
|
"grad_norm": 0.20892374204766953, |
|
"learning_rate": 1.6732269554543794e-05, |
|
"loss": 0.535, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.49159472966833256, |
|
"grad_norm": 0.2160818063170021, |
|
"learning_rate": 1.668746887876531e-05, |
|
"loss": 0.5083, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.4925034075420264, |
|
"grad_norm": 0.20118901727019187, |
|
"learning_rate": 1.6642652951836233e-05, |
|
"loss": 0.4943, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.4934120854157201, |
|
"grad_norm": 0.22282897518570308, |
|
"learning_rate": 1.659782217879778e-05, |
|
"loss": 0.5317, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.4943207632894139, |
|
"grad_norm": 0.22131531803306617, |
|
"learning_rate": 1.655297696482534e-05, |
|
"loss": 0.5116, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.4952294411631077, |
|
"grad_norm": 0.2095981771825046, |
|
"learning_rate": 1.6508117715224834e-05, |
|
"loss": 0.5149, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.49613811903680144, |
|
"grad_norm": 0.21419702778901387, |
|
"learning_rate": 1.646324483542902e-05, |
|
"loss": 0.517, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.49704679691049525, |
|
"grad_norm": 0.22616880756921168, |
|
"learning_rate": 1.6418358730993862e-05, |
|
"loss": 0.5114, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.497955474784189, |
|
"grad_norm": 0.2184048869089771, |
|
"learning_rate": 1.637345980759481e-05, |
|
"loss": 0.5362, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.49886415265788275, |
|
"grad_norm": 0.22632003781828508, |
|
"learning_rate": 1.6328548471023224e-05, |
|
"loss": 0.4988, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.49977283053157656, |
|
"grad_norm": 0.209816073482319, |
|
"learning_rate": 1.6283625127182596e-05, |
|
"loss": 0.4919, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5006815084052704, |
|
"grad_norm": 0.25442817782757327, |
|
"learning_rate": 1.623869018208499e-05, |
|
"loss": 0.5227, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.5015901862789641, |
|
"grad_norm": 0.24267619179624578, |
|
"learning_rate": 1.6193744041847277e-05, |
|
"loss": 0.5388, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.5024988641526579, |
|
"grad_norm": 0.2228324843248023, |
|
"learning_rate": 1.6148787112687544e-05, |
|
"loss": 0.5087, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.5034075420263516, |
|
"grad_norm": 0.3352682201561984, |
|
"learning_rate": 1.6103819800921363e-05, |
|
"loss": 0.5051, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.5043162199000454, |
|
"grad_norm": 0.24806511392962588, |
|
"learning_rate": 1.605884251295815e-05, |
|
"loss": 0.4982, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.5052248977737392, |
|
"grad_norm": 0.24952500063687508, |
|
"learning_rate": 1.6013855655297498e-05, |
|
"loss": 0.5014, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.506133575647433, |
|
"grad_norm": 0.21538151492096425, |
|
"learning_rate": 1.5968859634525462e-05, |
|
"loss": 0.5072, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.5070422535211268, |
|
"grad_norm": 0.23504846953730568, |
|
"learning_rate": 1.592385485731094e-05, |
|
"loss": 0.5164, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.5079509313948205, |
|
"grad_norm": 0.24076075119687024, |
|
"learning_rate": 1.5878841730401945e-05, |
|
"loss": 0.4991, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.5088596092685143, |
|
"grad_norm": 0.24918458142521988, |
|
"learning_rate": 1.583382066062198e-05, |
|
"loss": 0.5259, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.5097682871422081, |
|
"grad_norm": 0.22309906969227764, |
|
"learning_rate": 1.5788792054866314e-05, |
|
"loss": 0.5177, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.5106769650159019, |
|
"grad_norm": 0.22622168642080814, |
|
"learning_rate": 1.5743756320098334e-05, |
|
"loss": 0.5167, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.5115856428895956, |
|
"grad_norm": 0.2328779476599096, |
|
"learning_rate": 1.5698713863345858e-05, |
|
"loss": 0.5028, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.5124943207632894, |
|
"grad_norm": 0.22131686640647605, |
|
"learning_rate": 1.565366509169745e-05, |
|
"loss": 0.5243, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.5134029986369832, |
|
"grad_norm": 0.22087250704443534, |
|
"learning_rate": 1.5608610412298758e-05, |
|
"loss": 0.5322, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.514311676510677, |
|
"grad_norm": 0.23457527179046703, |
|
"learning_rate": 1.5563550232348813e-05, |
|
"loss": 0.4915, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.5152203543843707, |
|
"grad_norm": 0.22849851842588884, |
|
"learning_rate": 1.5518484959096365e-05, |
|
"loss": 0.5049, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.5161290322580645, |
|
"grad_norm": 0.22145239435825756, |
|
"learning_rate": 1.54734149998362e-05, |
|
"loss": 0.5063, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.5170377101317583, |
|
"grad_norm": 0.2400202215469711, |
|
"learning_rate": 1.5428340761905444e-05, |
|
"loss": 0.5114, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.5179463880054521, |
|
"grad_norm": 0.2493745198857758, |
|
"learning_rate": 1.5383262652679914e-05, |
|
"loss": 0.5155, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.5188550658791459, |
|
"grad_norm": 0.23320737861124252, |
|
"learning_rate": 1.533818107957038e-05, |
|
"loss": 0.5267, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.5197637437528396, |
|
"grad_norm": 0.20584929895864323, |
|
"learning_rate": 1.5293096450018966e-05, |
|
"loss": 0.5057, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.5206724216265334, |
|
"grad_norm": 0.23493566008214234, |
|
"learning_rate": 1.524800917149538e-05, |
|
"loss": 0.4921, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.5215810995002271, |
|
"grad_norm": 0.21564463992482097, |
|
"learning_rate": 1.5202919651493284e-05, |
|
"loss": 0.5006, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.522489777373921, |
|
"grad_norm": 0.21648649893102492, |
|
"learning_rate": 1.51578282975266e-05, |
|
"loss": 0.483, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.5233984552476147, |
|
"grad_norm": 0.21837001678688414, |
|
"learning_rate": 1.511273551712583e-05, |
|
"loss": 0.517, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.5243071331213085, |
|
"grad_norm": 0.2193055231409076, |
|
"learning_rate": 1.5067641717834358e-05, |
|
"loss": 0.4995, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.5252158109950023, |
|
"grad_norm": 0.1932688376041214, |
|
"learning_rate": 1.5022547307204778e-05, |
|
"loss": 0.4947, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.526124488868696, |
|
"grad_norm": 0.23059329760041095, |
|
"learning_rate": 1.4977452692795223e-05, |
|
"loss": 0.5208, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.5270331667423899, |
|
"grad_norm": 0.2172703691219654, |
|
"learning_rate": 1.4932358282165646e-05, |
|
"loss": 0.5049, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.5279418446160836, |
|
"grad_norm": 0.21149601085439262, |
|
"learning_rate": 1.4887264482874173e-05, |
|
"loss": 0.5061, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.5288505224897774, |
|
"grad_norm": 0.22167405172460763, |
|
"learning_rate": 1.4842171702473399e-05, |
|
"loss": 0.5046, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.5297592003634711, |
|
"grad_norm": 0.22804024341176546, |
|
"learning_rate": 1.4797080348506718e-05, |
|
"loss": 0.5299, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.5306678782371649, |
|
"grad_norm": 0.2404165803826497, |
|
"learning_rate": 1.4751990828504623e-05, |
|
"loss": 0.5163, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.5315765561108587, |
|
"grad_norm": 0.2215909140593772, |
|
"learning_rate": 1.4706903549981035e-05, |
|
"loss": 0.5071, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.5324852339845525, |
|
"grad_norm": 0.2022298755605557, |
|
"learning_rate": 1.466181892042962e-05, |
|
"loss": 0.5233, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.5333939118582463, |
|
"grad_norm": 0.22151314608281256, |
|
"learning_rate": 1.4616737347320093e-05, |
|
"loss": 0.5044, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.53430258973194, |
|
"grad_norm": 0.21798627566866305, |
|
"learning_rate": 1.4571659238094557e-05, |
|
"loss": 0.496, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.5352112676056338, |
|
"grad_norm": 0.20560902855692786, |
|
"learning_rate": 1.4526585000163802e-05, |
|
"loss": 0.5013, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.5361199454793276, |
|
"grad_norm": 0.22636790116887076, |
|
"learning_rate": 1.4481515040903639e-05, |
|
"loss": 0.514, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5370286233530214, |
|
"grad_norm": 0.20664895378738649, |
|
"learning_rate": 1.4436449767651191e-05, |
|
"loss": 0.51, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.5379373012267151, |
|
"grad_norm": 0.21391101168708013, |
|
"learning_rate": 1.4391389587701245e-05, |
|
"loss": 0.5091, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.5388459791004089, |
|
"grad_norm": 0.22392843939385165, |
|
"learning_rate": 1.4346334908302555e-05, |
|
"loss": 0.5291, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.5397546569741026, |
|
"grad_norm": 0.20828111344927408, |
|
"learning_rate": 1.4301286136654146e-05, |
|
"loss": 0.4944, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.5406633348477965, |
|
"grad_norm": 0.2224041235793703, |
|
"learning_rate": 1.4256243679901665e-05, |
|
"loss": 0.493, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5415720127214902, |
|
"grad_norm": 0.22097320718171637, |
|
"learning_rate": 1.4211207945133685e-05, |
|
"loss": 0.5216, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.542480690595184, |
|
"grad_norm": 0.19560322888351336, |
|
"learning_rate": 1.4166179339378022e-05, |
|
"loss": 0.5001, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.5433893684688778, |
|
"grad_norm": 0.2090090684822388, |
|
"learning_rate": 1.4121158269598054e-05, |
|
"loss": 0.4927, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.5442980463425715, |
|
"grad_norm": 0.22417226373585894, |
|
"learning_rate": 1.4076145142689065e-05, |
|
"loss": 0.5283, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.5452067242162654, |
|
"grad_norm": 0.24271982418731053, |
|
"learning_rate": 1.4031140365474543e-05, |
|
"loss": 0.481, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5461154020899591, |
|
"grad_norm": 0.23001454521139977, |
|
"learning_rate": 1.398614434470251e-05, |
|
"loss": 0.5336, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.5470240799636529, |
|
"grad_norm": 0.263483773100571, |
|
"learning_rate": 1.3941157487041848e-05, |
|
"loss": 0.508, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.5479327578373466, |
|
"grad_norm": 0.20548872823129907, |
|
"learning_rate": 1.389618019907864e-05, |
|
"loss": 0.5033, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.5488414357110404, |
|
"grad_norm": 0.23513026738046738, |
|
"learning_rate": 1.385121288731246e-05, |
|
"loss": 0.5101, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.5497501135847342, |
|
"grad_norm": 0.22755356478541824, |
|
"learning_rate": 1.3806255958152725e-05, |
|
"loss": 0.5212, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.550658791458428, |
|
"grad_norm": 0.21734920044910347, |
|
"learning_rate": 1.3761309817915017e-05, |
|
"loss": 0.5133, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.5515674693321218, |
|
"grad_norm": 0.22006575841027068, |
|
"learning_rate": 1.3716374872817408e-05, |
|
"loss": 0.4916, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.5524761472058155, |
|
"grad_norm": 0.21806084980407606, |
|
"learning_rate": 1.3671451528976782e-05, |
|
"loss": 0.5062, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.5533848250795094, |
|
"grad_norm": 0.26445597597573617, |
|
"learning_rate": 1.3626540192405188e-05, |
|
"loss": 0.5115, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.5542935029532031, |
|
"grad_norm": 0.23453239295975004, |
|
"learning_rate": 1.3581641269006142e-05, |
|
"loss": 0.521, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5552021808268969, |
|
"grad_norm": 0.198955340760059, |
|
"learning_rate": 1.3536755164570977e-05, |
|
"loss": 0.5017, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.5561108587005906, |
|
"grad_norm": 0.2384141322061314, |
|
"learning_rate": 1.3491882284775165e-05, |
|
"loss": 0.5379, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.5570195365742844, |
|
"grad_norm": 0.3533164212329243, |
|
"learning_rate": 1.3447023035174662e-05, |
|
"loss": 0.5106, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.5579282144479782, |
|
"grad_norm": 0.23335137358636715, |
|
"learning_rate": 1.340217782120222e-05, |
|
"loss": 0.5029, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.558836892321672, |
|
"grad_norm": 0.2132941094220617, |
|
"learning_rate": 1.3357347048163767e-05, |
|
"loss": 0.5089, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5597455701953657, |
|
"grad_norm": 0.21070445466929358, |
|
"learning_rate": 1.331253112123469e-05, |
|
"loss": 0.5041, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.5606542480690595, |
|
"grad_norm": 0.22376638889170286, |
|
"learning_rate": 1.3267730445456208e-05, |
|
"loss": 0.5256, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.5615629259427533, |
|
"grad_norm": 0.21326176673695557, |
|
"learning_rate": 1.3222945425731705e-05, |
|
"loss": 0.4967, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.5624716038164471, |
|
"grad_norm": 0.21837723498723205, |
|
"learning_rate": 1.3178176466823057e-05, |
|
"loss": 0.4915, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.5633802816901409, |
|
"grad_norm": 0.21674240459535304, |
|
"learning_rate": 1.3133423973346996e-05, |
|
"loss": 0.5228, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5642889595638346, |
|
"grad_norm": 0.2089400672590786, |
|
"learning_rate": 1.3088688349771425e-05, |
|
"loss": 0.4983, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.5651976374375284, |
|
"grad_norm": 0.2178503682465244, |
|
"learning_rate": 1.3043970000411811e-05, |
|
"loss": 0.493, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.5661063153112221, |
|
"grad_norm": 0.21949719597501016, |
|
"learning_rate": 1.2999269329427459e-05, |
|
"loss": 0.4952, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.567014993184916, |
|
"grad_norm": 0.21843384021789625, |
|
"learning_rate": 1.2954586740817927e-05, |
|
"loss": 0.5005, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.5679236710586097, |
|
"grad_norm": 0.20757207890878002, |
|
"learning_rate": 1.2909922638419336e-05, |
|
"loss": 0.5024, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5688323489323035, |
|
"grad_norm": 0.20851651967649487, |
|
"learning_rate": 1.2865277425900725e-05, |
|
"loss": 0.5044, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.5697410268059973, |
|
"grad_norm": 0.21405316788676493, |
|
"learning_rate": 1.2820651506760419e-05, |
|
"loss": 0.537, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.570649704679691, |
|
"grad_norm": 0.22159240306473557, |
|
"learning_rate": 1.277604528432237e-05, |
|
"loss": 0.5218, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.5715583825533849, |
|
"grad_norm": 0.20925418698672665, |
|
"learning_rate": 1.2731459161732513e-05, |
|
"loss": 0.5431, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.5724670604270786, |
|
"grad_norm": 0.21144929339922172, |
|
"learning_rate": 1.2686893541955113e-05, |
|
"loss": 0.5115, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5733757383007724, |
|
"grad_norm": 0.2261368753908665, |
|
"learning_rate": 1.2642348827769152e-05, |
|
"loss": 0.5267, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.5742844161744661, |
|
"grad_norm": 0.20351693400728962, |
|
"learning_rate": 1.2597825421764647e-05, |
|
"loss": 0.5047, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.5751930940481599, |
|
"grad_norm": 0.21090532223303304, |
|
"learning_rate": 1.2553323726339052e-05, |
|
"loss": 0.5089, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.5761017719218537, |
|
"grad_norm": 0.21093740956542803, |
|
"learning_rate": 1.2508844143693595e-05, |
|
"loss": 0.5068, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.5770104497955475, |
|
"grad_norm": 0.23232720167531504, |
|
"learning_rate": 1.2464387075829654e-05, |
|
"loss": 0.4898, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5779191276692413, |
|
"grad_norm": 0.20842996506303982, |
|
"learning_rate": 1.2419952924545125e-05, |
|
"loss": 0.5187, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.578827805542935, |
|
"grad_norm": 0.2129656762015234, |
|
"learning_rate": 1.2375542091430776e-05, |
|
"loss": 0.5005, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.5797364834166288, |
|
"grad_norm": 0.20955597848686514, |
|
"learning_rate": 1.2331154977866638e-05, |
|
"loss": 0.5039, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.5806451612903226, |
|
"grad_norm": 0.2153077918489364, |
|
"learning_rate": 1.2286791985018356e-05, |
|
"loss": 0.5143, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.5815538391640164, |
|
"grad_norm": 0.20026489084845642, |
|
"learning_rate": 1.2242453513833584e-05, |
|
"loss": 0.4947, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.5824625170377101, |
|
"grad_norm": 0.1983389631994418, |
|
"learning_rate": 1.2198139965038356e-05, |
|
"loss": 0.5286, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.5833711949114039, |
|
"grad_norm": 0.21541284455802412, |
|
"learning_rate": 1.2153851739133452e-05, |
|
"loss": 0.5181, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.5842798727850976, |
|
"grad_norm": 0.2286012335295052, |
|
"learning_rate": 1.210958923639079e-05, |
|
"loss": 0.5091, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.5851885506587915, |
|
"grad_norm": 0.21935071020420469, |
|
"learning_rate": 1.20653528568498e-05, |
|
"loss": 0.5168, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.5860972285324852, |
|
"grad_norm": 0.21372980356378826, |
|
"learning_rate": 1.2021143000313822e-05, |
|
"loss": 0.5009, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.587005906406179, |
|
"grad_norm": 0.22775805133335744, |
|
"learning_rate": 1.1976960066346474e-05, |
|
"loss": 0.4944, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.5879145842798728, |
|
"grad_norm": 0.24768343363930162, |
|
"learning_rate": 1.1932804454268057e-05, |
|
"loss": 0.5314, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.5888232621535665, |
|
"grad_norm": 0.2234310813880864, |
|
"learning_rate": 1.1888676563151942e-05, |
|
"loss": 0.5195, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.5897319400272604, |
|
"grad_norm": 0.2009851586835417, |
|
"learning_rate": 1.184457679182096e-05, |
|
"loss": 0.5122, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.5906406179009541, |
|
"grad_norm": 0.21317085471927943, |
|
"learning_rate": 1.18005055388438e-05, |
|
"loss": 0.4985, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.5915492957746479, |
|
"grad_norm": 0.30946744552229544, |
|
"learning_rate": 1.1756463202531392e-05, |
|
"loss": 0.5005, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.5924579736483416, |
|
"grad_norm": 0.23466972573336475, |
|
"learning_rate": 1.1712450180933342e-05, |
|
"loss": 0.5225, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.5933666515220355, |
|
"grad_norm": 0.2056155161824234, |
|
"learning_rate": 1.1668466871834288e-05, |
|
"loss": 0.515, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.5942753293957292, |
|
"grad_norm": 0.23023528471242866, |
|
"learning_rate": 1.1624513672750358e-05, |
|
"loss": 0.4999, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.595184007269423, |
|
"grad_norm": 0.21059005559472435, |
|
"learning_rate": 1.1580590980925527e-05, |
|
"loss": 0.4945, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.5960926851431168, |
|
"grad_norm": 0.2110617476567799, |
|
"learning_rate": 1.1536699193328063e-05, |
|
"loss": 0.5198, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.5970013630168105, |
|
"grad_norm": 0.22093511309521258, |
|
"learning_rate": 1.1492838706646913e-05, |
|
"loss": 0.5226, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.5979100408905044, |
|
"grad_norm": 0.19846566764978263, |
|
"learning_rate": 1.1449009917288136e-05, |
|
"loss": 0.4993, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.5988187187641981, |
|
"grad_norm": 0.20467790497594238, |
|
"learning_rate": 1.1405213221371318e-05, |
|
"loss": 0.4903, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.5997273966378919, |
|
"grad_norm": 0.19548151786525753, |
|
"learning_rate": 1.1361449014725974e-05, |
|
"loss": 0.5188, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.6006360745115856, |
|
"grad_norm": 0.1955762705848056, |
|
"learning_rate": 1.1317717692888014e-05, |
|
"loss": 0.4858, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.6015447523852794, |
|
"grad_norm": 0.20843634480541331, |
|
"learning_rate": 1.1274019651096103e-05, |
|
"loss": 0.5062, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.6024534302589732, |
|
"grad_norm": 0.22451284092117968, |
|
"learning_rate": 1.123035528428815e-05, |
|
"loss": 0.534, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.603362108132667, |
|
"grad_norm": 0.2136614171554175, |
|
"learning_rate": 1.11867249870977e-05, |
|
"loss": 0.51, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.6042707860063607, |
|
"grad_norm": 0.21850302263862784, |
|
"learning_rate": 1.1143129153850385e-05, |
|
"loss": 0.5172, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.6051794638800545, |
|
"grad_norm": 0.21996125799654462, |
|
"learning_rate": 1.1099568178560356e-05, |
|
"loss": 0.5067, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.6060881417537483, |
|
"grad_norm": 0.23093396526077578, |
|
"learning_rate": 1.105604245492673e-05, |
|
"loss": 0.516, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.6069968196274421, |
|
"grad_norm": 0.20379662572062934, |
|
"learning_rate": 1.101255237633001e-05, |
|
"loss": 0.4867, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.6079054975011359, |
|
"grad_norm": 0.19874639932072963, |
|
"learning_rate": 1.0969098335828547e-05, |
|
"loss": 0.4767, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.6088141753748296, |
|
"grad_norm": 0.20562885391729782, |
|
"learning_rate": 1.0925680726154994e-05, |
|
"loss": 0.5092, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.6097228532485234, |
|
"grad_norm": 0.18948027626777414, |
|
"learning_rate": 1.0882299939712727e-05, |
|
"loss": 0.5281, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.6106315311222171, |
|
"grad_norm": 0.20962122086701915, |
|
"learning_rate": 1.0838956368572335e-05, |
|
"loss": 0.5055, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.611540208995911, |
|
"grad_norm": 0.22852512891883703, |
|
"learning_rate": 1.0795650404468047e-05, |
|
"loss": 0.4874, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.6124488868696047, |
|
"grad_norm": 0.2101171091267797, |
|
"learning_rate": 1.0752382438794224e-05, |
|
"loss": 0.5212, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.6133575647432985, |
|
"grad_norm": 0.19332227679137917, |
|
"learning_rate": 1.0709152862601775e-05, |
|
"loss": 0.511, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.6142662426169923, |
|
"grad_norm": 0.20969920743518988, |
|
"learning_rate": 1.0665962066594666e-05, |
|
"loss": 0.4712, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.615174920490686, |
|
"grad_norm": 0.22729746345428423, |
|
"learning_rate": 1.0622810441126372e-05, |
|
"loss": 0.5073, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.6160835983643799, |
|
"grad_norm": 0.20566804358469726, |
|
"learning_rate": 1.0579698376196339e-05, |
|
"loss": 0.5066, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.6169922762380736, |
|
"grad_norm": 0.20020676818980052, |
|
"learning_rate": 1.0536626261446475e-05, |
|
"loss": 0.5248, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.6179009541117674, |
|
"grad_norm": 0.21155144921065883, |
|
"learning_rate": 1.049359448615763e-05, |
|
"loss": 0.4865, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.6188096319854611, |
|
"grad_norm": 0.23535876386613103, |
|
"learning_rate": 1.0450603439246063e-05, |
|
"loss": 0.5168, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.6197183098591549, |
|
"grad_norm": 0.2132538834911777, |
|
"learning_rate": 1.0407653509259933e-05, |
|
"loss": 0.5071, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.6206269877328487, |
|
"grad_norm": 0.21538594477453463, |
|
"learning_rate": 1.036474508437579e-05, |
|
"loss": 0.5149, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.6215356656065425, |
|
"grad_norm": 0.20645694082439553, |
|
"learning_rate": 1.0321878552395073e-05, |
|
"loss": 0.5114, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.6224443434802363, |
|
"grad_norm": 0.2281953724292079, |
|
"learning_rate": 1.027905430074058e-05, |
|
"loss": 0.4916, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.62335302135393, |
|
"grad_norm": 0.20946929959943567, |
|
"learning_rate": 1.0236272716453012e-05, |
|
"loss": 0.4813, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.6242616992276238, |
|
"grad_norm": 0.19038681295735432, |
|
"learning_rate": 1.0193534186187423e-05, |
|
"loss": 0.4909, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.6251703771013176, |
|
"grad_norm": 0.28388068586479065, |
|
"learning_rate": 1.015083909620976e-05, |
|
"loss": 0.5172, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.6260790549750114, |
|
"grad_norm": 0.19357100963981475, |
|
"learning_rate": 1.0108187832393359e-05, |
|
"loss": 0.4947, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.6269877328487051, |
|
"grad_norm": 0.19797701784277713, |
|
"learning_rate": 1.0065580780215457e-05, |
|
"loss": 0.4962, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.6278964107223989, |
|
"grad_norm": 0.1972578079022867, |
|
"learning_rate": 1.0023018324753726e-05, |
|
"loss": 0.4913, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.6288050885960926, |
|
"grad_norm": 0.212635477184621, |
|
"learning_rate": 9.980500850682752e-06, |
|
"loss": 0.5025, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.6297137664697865, |
|
"grad_norm": 0.2326728400938466, |
|
"learning_rate": 9.938028742270612e-06, |
|
"loss": 0.4956, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.6306224443434802, |
|
"grad_norm": 0.20880774946321953, |
|
"learning_rate": 9.895602383375355e-06, |
|
"loss": 0.5106, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.631531122217174, |
|
"grad_norm": 0.20984655979580405, |
|
"learning_rate": 9.853222157441562e-06, |
|
"loss": 0.488, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.6324398000908678, |
|
"grad_norm": 0.21557027206249252, |
|
"learning_rate": 9.810888447496859e-06, |
|
"loss": 0.4903, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.6333484779645616, |
|
"grad_norm": 0.262479391876784, |
|
"learning_rate": 9.768601636148473e-06, |
|
"loss": 0.506, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.6342571558382554, |
|
"grad_norm": 0.2168847269340925, |
|
"learning_rate": 9.726362105579762e-06, |
|
"loss": 0.5274, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.6351658337119491, |
|
"grad_norm": 0.2100750942914297, |
|
"learning_rate": 9.684170237546783e-06, |
|
"loss": 0.5095, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.6360745115856429, |
|
"grad_norm": 0.20337953864837427, |
|
"learning_rate": 9.642026413374792e-06, |
|
"loss": 0.5025, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.6369831894593366, |
|
"grad_norm": 0.19482375820759582, |
|
"learning_rate": 9.599931013954858e-06, |
|
"loss": 0.4989, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.6378918673330305, |
|
"grad_norm": 0.20122907456598915, |
|
"learning_rate": 9.557884419740387e-06, |
|
"loss": 0.4894, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.6388005452067242, |
|
"grad_norm": 0.21336177694342048, |
|
"learning_rate": 9.515887010743677e-06, |
|
"loss": 0.5042, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.639709223080418, |
|
"grad_norm": 0.21951281624554214, |
|
"learning_rate": 9.473939166532506e-06, |
|
"loss": 0.5129, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.6406179009541118, |
|
"grad_norm": 0.20685330838999208, |
|
"learning_rate": 9.432041266226686e-06, |
|
"loss": 0.4882, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.6415265788278055, |
|
"grad_norm": 0.19848178888090984, |
|
"learning_rate": 9.390193688494657e-06, |
|
"loss": 0.5163, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.6424352567014994, |
|
"grad_norm": 0.20764755511633418, |
|
"learning_rate": 9.348396811550025e-06, |
|
"loss": 0.5157, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.6433439345751931, |
|
"grad_norm": 0.20487025317138272, |
|
"learning_rate": 9.306651013148185e-06, |
|
"loss": 0.5115, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.6442526124488869, |
|
"grad_norm": 0.2031054214275953, |
|
"learning_rate": 9.264956670582887e-06, |
|
"loss": 0.5091, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.6451612903225806, |
|
"grad_norm": 0.18811587757096598, |
|
"learning_rate": 9.223314160682821e-06, |
|
"loss": 0.5009, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.6460699681962744, |
|
"grad_norm": 0.205011127608451, |
|
"learning_rate": 9.181723859808225e-06, |
|
"loss": 0.5251, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.6469786460699682, |
|
"grad_norm": 0.19846646697630305, |
|
"learning_rate": 9.140186143847488e-06, |
|
"loss": 0.5182, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.647887323943662, |
|
"grad_norm": 0.2028834532784052, |
|
"learning_rate": 9.098701388213729e-06, |
|
"loss": 0.4937, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.6487960018173557, |
|
"grad_norm": 0.18360750051219688, |
|
"learning_rate": 9.057269967841415e-06, |
|
"loss": 0.5112, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.6497046796910495, |
|
"grad_norm": 0.21458351259772931, |
|
"learning_rate": 9.015892257182988e-06, |
|
"loss": 0.5143, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6506133575647433, |
|
"grad_norm": 0.2063598971840294, |
|
"learning_rate": 8.974568630205462e-06, |
|
"loss": 0.5196, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.6515220354384371, |
|
"grad_norm": 0.18873835348846954, |
|
"learning_rate": 8.933299460387042e-06, |
|
"loss": 0.5016, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.6524307133121309, |
|
"grad_norm": 0.1993299721324064, |
|
"learning_rate": 8.892085120713762e-06, |
|
"loss": 0.5084, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.6533393911858246, |
|
"grad_norm": 0.21144467605104506, |
|
"learning_rate": 8.850925983676113e-06, |
|
"loss": 0.5089, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.6542480690595184, |
|
"grad_norm": 0.20483282234928168, |
|
"learning_rate": 8.809822421265665e-06, |
|
"loss": 0.5014, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.6551567469332121, |
|
"grad_norm": 0.19814430014436735, |
|
"learning_rate": 8.768774804971705e-06, |
|
"loss": 0.5006, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.656065424806906, |
|
"grad_norm": 0.19104520609396944, |
|
"learning_rate": 8.727783505777894e-06, |
|
"loss": 0.5079, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.6569741026805997, |
|
"grad_norm": 0.19824918419214743, |
|
"learning_rate": 8.686848894158905e-06, |
|
"loss": 0.5092, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.6578827805542935, |
|
"grad_norm": 0.19627827278209117, |
|
"learning_rate": 8.645971340077066e-06, |
|
"loss": 0.5037, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.6587914584279873, |
|
"grad_norm": 0.21191569677398264, |
|
"learning_rate": 8.605151212979033e-06, |
|
"loss": 0.505, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.659700136301681, |
|
"grad_norm": 0.19339067359779558, |
|
"learning_rate": 8.564388881792456e-06, |
|
"loss": 0.5068, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.6606088141753749, |
|
"grad_norm": 0.20882673980251068, |
|
"learning_rate": 8.523684714922608e-06, |
|
"loss": 0.5209, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.6615174920490686, |
|
"grad_norm": 0.1922561192899202, |
|
"learning_rate": 8.483039080249092e-06, |
|
"loss": 0.4767, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.6624261699227624, |
|
"grad_norm": 0.20198892951089617, |
|
"learning_rate": 8.442452345122498e-06, |
|
"loss": 0.5142, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.6633348477964561, |
|
"grad_norm": 0.19893991599577202, |
|
"learning_rate": 8.401924876361101e-06, |
|
"loss": 0.4957, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.6642435256701499, |
|
"grad_norm": 0.18385193458028223, |
|
"learning_rate": 8.361457040247518e-06, |
|
"loss": 0.5059, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.6651522035438437, |
|
"grad_norm": 0.20111627364657178, |
|
"learning_rate": 8.321049202525428e-06, |
|
"loss": 0.5005, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.6660608814175375, |
|
"grad_norm": 0.1886288138124668, |
|
"learning_rate": 8.280701728396235e-06, |
|
"loss": 0.5, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.6669695592912313, |
|
"grad_norm": 0.17622798353544158, |
|
"learning_rate": 8.240414982515803e-06, |
|
"loss": 0.5118, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.667878237164925, |
|
"grad_norm": 0.19635146761708042, |
|
"learning_rate": 8.200189328991125e-06, |
|
"loss": 0.5193, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.6687869150386188, |
|
"grad_norm": 0.1897240476003535, |
|
"learning_rate": 8.160025131377044e-06, |
|
"loss": 0.4951, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.6696955929123126, |
|
"grad_norm": 0.19822362467607307, |
|
"learning_rate": 8.119922752672994e-06, |
|
"loss": 0.5045, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.6706042707860064, |
|
"grad_norm": 0.21098146480685617, |
|
"learning_rate": 8.079882555319685e-06, |
|
"loss": 0.5013, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.6715129486597001, |
|
"grad_norm": 0.19374590179910672, |
|
"learning_rate": 8.039904901195832e-06, |
|
"loss": 0.5028, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.6724216265333939, |
|
"grad_norm": 0.20027425053961329, |
|
"learning_rate": 7.999990151614895e-06, |
|
"loss": 0.5122, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6733303044070876, |
|
"grad_norm": 0.20684534413735903, |
|
"learning_rate": 7.960138667321822e-06, |
|
"loss": 0.5074, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.6742389822807815, |
|
"grad_norm": 0.19882319560879994, |
|
"learning_rate": 7.920350808489761e-06, |
|
"loss": 0.4847, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.6751476601544752, |
|
"grad_norm": 0.20660789884737718, |
|
"learning_rate": 7.880626934716815e-06, |
|
"loss": 0.4906, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.676056338028169, |
|
"grad_norm": 0.20808779051241447, |
|
"learning_rate": 7.840967405022832e-06, |
|
"loss": 0.4979, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.6769650159018628, |
|
"grad_norm": 0.20415548749844553, |
|
"learning_rate": 7.801372577846091e-06, |
|
"loss": 0.4899, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6778736937755566, |
|
"grad_norm": 0.22772130134969482, |
|
"learning_rate": 7.76184281104011e-06, |
|
"loss": 0.511, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.6787823716492504, |
|
"grad_norm": 0.19909122191248269, |
|
"learning_rate": 7.72237846187039e-06, |
|
"loss": 0.4991, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.6796910495229441, |
|
"grad_norm": 0.19262198987408088, |
|
"learning_rate": 7.682979887011212e-06, |
|
"loss": 0.4761, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.6805997273966379, |
|
"grad_norm": 0.18775529743972974, |
|
"learning_rate": 7.643647442542383e-06, |
|
"loss": 0.513, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.6815084052703316, |
|
"grad_norm": 0.2173785191691545, |
|
"learning_rate": 7.604381483946028e-06, |
|
"loss": 0.5072, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6824170831440255, |
|
"grad_norm": 0.19147025340181428, |
|
"learning_rate": 7.565182366103391e-06, |
|
"loss": 0.5149, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.6833257610177192, |
|
"grad_norm": 0.21129377540626434, |
|
"learning_rate": 7.526050443291621e-06, |
|
"loss": 0.5057, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.684234438891413, |
|
"grad_norm": 0.20406439218144845, |
|
"learning_rate": 7.486986069180552e-06, |
|
"loss": 0.499, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.6851431167651068, |
|
"grad_norm": 0.18926130030422847, |
|
"learning_rate": 7.447989596829518e-06, |
|
"loss": 0.495, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.6860517946388005, |
|
"grad_norm": 0.20858318842417992, |
|
"learning_rate": 7.409061378684187e-06, |
|
"loss": 0.4938, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.6869604725124944, |
|
"grad_norm": 0.2131058546708383, |
|
"learning_rate": 7.370201766573325e-06, |
|
"loss": 0.4913, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.6878691503861881, |
|
"grad_norm": 0.20230975445307423, |
|
"learning_rate": 7.331411111705673e-06, |
|
"loss": 0.4876, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.6887778282598819, |
|
"grad_norm": 0.2053682321742907, |
|
"learning_rate": 7.2926897646667185e-06, |
|
"loss": 0.5141, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.6896865061335756, |
|
"grad_norm": 0.1954233893947322, |
|
"learning_rate": 7.254038075415573e-06, |
|
"loss": 0.5183, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.6905951840072694, |
|
"grad_norm": 0.18580034606326304, |
|
"learning_rate": 7.215456393281777e-06, |
|
"loss": 0.4906, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.6915038618809632, |
|
"grad_norm": 0.19465847889916651, |
|
"learning_rate": 7.176945066962152e-06, |
|
"loss": 0.5151, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.692412539754657, |
|
"grad_norm": 0.19359527460822998, |
|
"learning_rate": 7.138504444517669e-06, |
|
"loss": 0.5101, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.6933212176283507, |
|
"grad_norm": 0.2015981354614617, |
|
"learning_rate": 7.100134873370262e-06, |
|
"loss": 0.5119, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.6942298955020445, |
|
"grad_norm": 0.19680747451784078, |
|
"learning_rate": 7.061836700299735e-06, |
|
"loss": 0.4846, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.6951385733757383, |
|
"grad_norm": 0.20697700768347255, |
|
"learning_rate": 7.023610271440583e-06, |
|
"loss": 0.5047, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.6960472512494321, |
|
"grad_norm": 0.19041125662893202, |
|
"learning_rate": 6.985455932278904e-06, |
|
"loss": 0.5041, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.6969559291231259, |
|
"grad_norm": 0.1903949228408043, |
|
"learning_rate": 6.947374027649247e-06, |
|
"loss": 0.5234, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.6978646069968196, |
|
"grad_norm": 0.1806764266668593, |
|
"learning_rate": 6.909364901731498e-06, |
|
"loss": 0.4892, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.6987732848705134, |
|
"grad_norm": 0.18271924249493374, |
|
"learning_rate": 6.871428898047793e-06, |
|
"loss": 0.4893, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.6996819627442071, |
|
"grad_norm": 0.19917497101420706, |
|
"learning_rate": 6.833566359459399e-06, |
|
"loss": 0.5105, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.700590640617901, |
|
"grad_norm": 0.1863020335714146, |
|
"learning_rate": 6.795777628163599e-06, |
|
"loss": 0.5056, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.7014993184915947, |
|
"grad_norm": 0.18704918812911453, |
|
"learning_rate": 6.758063045690618e-06, |
|
"loss": 0.5128, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.7024079963652885, |
|
"grad_norm": 0.19036154965202196, |
|
"learning_rate": 6.720422952900545e-06, |
|
"loss": 0.5109, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.7033166742389823, |
|
"grad_norm": 0.19259035264174634, |
|
"learning_rate": 6.6828576899802265e-06, |
|
"loss": 0.4928, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.704225352112676, |
|
"grad_norm": 0.21312159007001835, |
|
"learning_rate": 6.645367596440202e-06, |
|
"loss": 0.5158, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.7051340299863699, |
|
"grad_norm": 0.20251937662605798, |
|
"learning_rate": 6.607953011111655e-06, |
|
"loss": 0.4764, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.7060427078600636, |
|
"grad_norm": 0.18240217297545883, |
|
"learning_rate": 6.570614272143327e-06, |
|
"loss": 0.4903, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.7069513857337574, |
|
"grad_norm": 0.20130547911212013, |
|
"learning_rate": 6.533351716998466e-06, |
|
"loss": 0.4906, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.7078600636074511, |
|
"grad_norm": 0.1959544034246125, |
|
"learning_rate": 6.496165682451772e-06, |
|
"loss": 0.5112, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.7087687414811449, |
|
"grad_norm": 0.19921216780711878, |
|
"learning_rate": 6.459056504586382e-06, |
|
"loss": 0.523, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.7096774193548387, |
|
"grad_norm": 0.19936125578543795, |
|
"learning_rate": 6.4220245187907915e-06, |
|
"loss": 0.5229, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.7105860972285325, |
|
"grad_norm": 0.20670310351708626, |
|
"learning_rate": 6.3850700597558465e-06, |
|
"loss": 0.499, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.7114947751022263, |
|
"grad_norm": 0.19209470383493454, |
|
"learning_rate": 6.348193461471723e-06, |
|
"loss": 0.4991, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.71240345297592, |
|
"grad_norm": 0.18972382968362497, |
|
"learning_rate": 6.3113950572248995e-06, |
|
"loss": 0.5097, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.7133121308496138, |
|
"grad_norm": 0.20452856428727872, |
|
"learning_rate": 6.274675179595138e-06, |
|
"loss": 0.5056, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.7142208087233076, |
|
"grad_norm": 0.19987737706875874, |
|
"learning_rate": 6.238034160452486e-06, |
|
"loss": 0.5106, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.7151294865970014, |
|
"grad_norm": 0.20681260557647593, |
|
"learning_rate": 6.2014723309542885e-06, |
|
"loss": 0.4914, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.7160381644706951, |
|
"grad_norm": 0.18866635094855325, |
|
"learning_rate": 6.164990021542173e-06, |
|
"loss": 0.4946, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.7169468423443889, |
|
"grad_norm": 0.20387756471884397, |
|
"learning_rate": 6.128587561939069e-06, |
|
"loss": 0.5067, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.7178555202180827, |
|
"grad_norm": 0.2071972801022147, |
|
"learning_rate": 6.092265281146247e-06, |
|
"loss": 0.5013, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.7187641980917765, |
|
"grad_norm": 0.1949470338273329, |
|
"learning_rate": 6.05602350744033e-06, |
|
"loss": 0.4913, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.7196728759654702, |
|
"grad_norm": 0.18810199549074258, |
|
"learning_rate": 6.0198625683703155e-06, |
|
"loss": 0.5105, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.720581553839164, |
|
"grad_norm": 0.19860832355578975, |
|
"learning_rate": 5.983782790754624e-06, |
|
"loss": 0.5136, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.7214902317128578, |
|
"grad_norm": 0.20946392891516918, |
|
"learning_rate": 5.94778450067817e-06, |
|
"loss": 0.5229, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.7223989095865516, |
|
"grad_norm": 0.19397171828229967, |
|
"learning_rate": 5.911868023489367e-06, |
|
"loss": 0.5027, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.7233075874602454, |
|
"grad_norm": 0.20693216539779685, |
|
"learning_rate": 5.8760336837972355e-06, |
|
"loss": 0.4838, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.7242162653339391, |
|
"grad_norm": 0.20734602876695452, |
|
"learning_rate": 5.840281805468427e-06, |
|
"loss": 0.4802, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.7251249432076329, |
|
"grad_norm": 0.1899623876568743, |
|
"learning_rate": 5.804612711624336e-06, |
|
"loss": 0.5277, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.7260336210813266, |
|
"grad_norm": 0.17933405146834225, |
|
"learning_rate": 5.769026724638145e-06, |
|
"loss": 0.5018, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.7269422989550205, |
|
"grad_norm": 0.19234978551841816, |
|
"learning_rate": 5.733524166131927e-06, |
|
"loss": 0.4977, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.7278509768287142, |
|
"grad_norm": 0.19968365635750715, |
|
"learning_rate": 5.6981053569737525e-06, |
|
"loss": 0.51, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.728759654702408, |
|
"grad_norm": 0.19239597936997574, |
|
"learning_rate": 5.662770617274754e-06, |
|
"loss": 0.5171, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.7296683325761018, |
|
"grad_norm": 0.21370083537965098, |
|
"learning_rate": 5.627520266386275e-06, |
|
"loss": 0.5181, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.7305770104497955, |
|
"grad_norm": 0.18966960367117558, |
|
"learning_rate": 5.592354622896944e-06, |
|
"loss": 0.4914, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.7314856883234894, |
|
"grad_norm": 0.33869822983935133, |
|
"learning_rate": 5.557274004629831e-06, |
|
"loss": 0.52, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.7323943661971831, |
|
"grad_norm": 0.2394910020691673, |
|
"learning_rate": 5.522278728639544e-06, |
|
"loss": 0.5106, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.7333030440708769, |
|
"grad_norm": 0.19290629969638587, |
|
"learning_rate": 5.487369111209378e-06, |
|
"loss": 0.5161, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.7342117219445706, |
|
"grad_norm": 0.19861433034388595, |
|
"learning_rate": 5.4525454678484625e-06, |
|
"loss": 0.508, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.7351203998182644, |
|
"grad_norm": 0.18958071403243007, |
|
"learning_rate": 5.417808113288907e-06, |
|
"loss": 0.4938, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.7360290776919582, |
|
"grad_norm": 0.2597240847120721, |
|
"learning_rate": 5.3831573614829445e-06, |
|
"loss": 0.507, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.736937755565652, |
|
"grad_norm": 0.19300705218252923, |
|
"learning_rate": 5.348593525600093e-06, |
|
"loss": 0.5026, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.7378464334393458, |
|
"grad_norm": 0.2017457758270549, |
|
"learning_rate": 5.31411691802436e-06, |
|
"loss": 0.5235, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.7387551113130395, |
|
"grad_norm": 0.18594427403779049, |
|
"learning_rate": 5.279727850351373e-06, |
|
"loss": 0.4953, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.7396637891867333, |
|
"grad_norm": 0.18821737436264005, |
|
"learning_rate": 5.245426633385583e-06, |
|
"loss": 0.5017, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.7405724670604271, |
|
"grad_norm": 0.18278935814928868, |
|
"learning_rate": 5.21121357713747e-06, |
|
"loss": 0.5227, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.7414811449341209, |
|
"grad_norm": 0.2467856622630847, |
|
"learning_rate": 5.177088990820725e-06, |
|
"loss": 0.5161, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.7423898228078146, |
|
"grad_norm": 0.19323901146064826, |
|
"learning_rate": 5.1430531828494476e-06, |
|
"loss": 0.5253, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.7432985006815084, |
|
"grad_norm": 0.1882470146628379, |
|
"learning_rate": 5.109106460835373e-06, |
|
"loss": 0.4987, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.7442071785552021, |
|
"grad_norm": 0.19056117982874113, |
|
"learning_rate": 5.075249131585095e-06, |
|
"loss": 0.5047, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.745115856428896, |
|
"grad_norm": 0.1830098479521597, |
|
"learning_rate": 5.04148150109728e-06, |
|
"loss": 0.4853, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.7460245343025897, |
|
"grad_norm": 0.22439208981690162, |
|
"learning_rate": 5.0078038745599e-06, |
|
"loss": 0.5064, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.7469332121762835, |
|
"grad_norm": 0.19494742516744892, |
|
"learning_rate": 4.9742165563474945e-06, |
|
"loss": 0.4938, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.7478418900499773, |
|
"grad_norm": 0.18946082176821574, |
|
"learning_rate": 4.940719850018406e-06, |
|
"loss": 0.4969, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.748750567923671, |
|
"grad_norm": 0.19406823767651848, |
|
"learning_rate": 4.907314058312029e-06, |
|
"loss": 0.5002, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.7496592457973649, |
|
"grad_norm": 0.1878236850523878, |
|
"learning_rate": 4.8739994831460795e-06, |
|
"loss": 0.5135, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.7505679236710586, |
|
"grad_norm": 0.18632298196275213, |
|
"learning_rate": 4.840776425613887e-06, |
|
"loss": 0.5021, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.7514766015447524, |
|
"grad_norm": 0.18739694082021402, |
|
"learning_rate": 4.807645185981629e-06, |
|
"loss": 0.5009, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.7523852794184461, |
|
"grad_norm": 0.19406009330598228, |
|
"learning_rate": 4.774606063685671e-06, |
|
"loss": 0.4979, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.7532939572921399, |
|
"grad_norm": 0.2033133910970134, |
|
"learning_rate": 4.74165935732981e-06, |
|
"loss": 0.5149, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.7542026351658337, |
|
"grad_norm": 0.18694027292685755, |
|
"learning_rate": 4.708805364682613e-06, |
|
"loss": 0.487, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.7551113130395275, |
|
"grad_norm": 0.19488605237165357, |
|
"learning_rate": 4.676044382674702e-06, |
|
"loss": 0.5134, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.7560199909132213, |
|
"grad_norm": 0.18325539474678315, |
|
"learning_rate": 4.64337670739608e-06, |
|
"loss": 0.5067, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.756928668786915, |
|
"grad_norm": 0.19497076360522683, |
|
"learning_rate": 4.610802634093466e-06, |
|
"loss": 0.5059, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.7578373466606089, |
|
"grad_norm": 0.17460360852298493, |
|
"learning_rate": 4.578322457167594e-06, |
|
"loss": 0.498, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.7587460245343026, |
|
"grad_norm": 0.1872301992724812, |
|
"learning_rate": 4.5459364701706e-06, |
|
"loss": 0.5074, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.7596547024079964, |
|
"grad_norm": 0.1862065608820259, |
|
"learning_rate": 4.513644965803316e-06, |
|
"loss": 0.5076, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.7605633802816901, |
|
"grad_norm": 0.18412311731882353, |
|
"learning_rate": 4.481448235912671e-06, |
|
"loss": 0.4986, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.7614720581553839, |
|
"grad_norm": 0.18372335213077792, |
|
"learning_rate": 4.449346571489024e-06, |
|
"loss": 0.5062, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.7623807360290777, |
|
"grad_norm": 0.18912332542943594, |
|
"learning_rate": 4.417340262663533e-06, |
|
"loss": 0.5299, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.7632894139027715, |
|
"grad_norm": 0.1869943803405463, |
|
"learning_rate": 4.385429598705566e-06, |
|
"loss": 0.502, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.7641980917764652, |
|
"grad_norm": 0.17811159329125326, |
|
"learning_rate": 4.353614868020051e-06, |
|
"loss": 0.4949, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.765106769650159, |
|
"grad_norm": 0.17061573326856555, |
|
"learning_rate": 4.3218963581448805e-06, |
|
"loss": 0.506, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.7660154475238528, |
|
"grad_norm": 0.19053231471949375, |
|
"learning_rate": 4.290274355748312e-06, |
|
"loss": 0.495, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.7669241253975466, |
|
"grad_norm": 0.20701343568348013, |
|
"learning_rate": 4.258749146626391e-06, |
|
"loss": 0.4916, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.7678328032712404, |
|
"grad_norm": 0.19297431569075968, |
|
"learning_rate": 4.227321015700348e-06, |
|
"loss": 0.5079, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.7687414811449341, |
|
"grad_norm": 0.17863142407970647, |
|
"learning_rate": 4.195990247014025e-06, |
|
"loss": 0.4922, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.7696501590186279, |
|
"grad_norm": 0.17444816686104248, |
|
"learning_rate": 4.164757123731332e-06, |
|
"loss": 0.5111, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.7705588368923216, |
|
"grad_norm": 0.18582842524471507, |
|
"learning_rate": 4.133621928133666e-06, |
|
"loss": 0.504, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.7714675147660155, |
|
"grad_norm": 0.19319239687061338, |
|
"learning_rate": 4.102584941617356e-06, |
|
"loss": 0.5138, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.7723761926397092, |
|
"grad_norm": 0.18983118802711624, |
|
"learning_rate": 4.07164644469113e-06, |
|
"loss": 0.5116, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.773284870513403, |
|
"grad_norm": 0.1822008351253182, |
|
"learning_rate": 4.04080671697359e-06, |
|
"loss": 0.5241, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.7741935483870968, |
|
"grad_norm": 0.19588345262435536, |
|
"learning_rate": 4.010066037190664e-06, |
|
"loss": 0.4892, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.7751022262607905, |
|
"grad_norm": 0.18127769473449273, |
|
"learning_rate": 3.97942468317309e-06, |
|
"loss": 0.4979, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.7760109041344844, |
|
"grad_norm": 0.17747552772155908, |
|
"learning_rate": 3.948882931853924e-06, |
|
"loss": 0.5038, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.7769195820081781, |
|
"grad_norm": 0.17474735068767536, |
|
"learning_rate": 3.918441059266026e-06, |
|
"loss": 0.5055, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.7778282598818719, |
|
"grad_norm": 0.18110316556024372, |
|
"learning_rate": 3.888099340539548e-06, |
|
"loss": 0.5142, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.7787369377555656, |
|
"grad_norm": 0.17381313511095411, |
|
"learning_rate": 3.857858049899469e-06, |
|
"loss": 0.5008, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.7796456156292594, |
|
"grad_norm": 0.2058649846312629, |
|
"learning_rate": 3.827717460663122e-06, |
|
"loss": 0.5171, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.7805542935029532, |
|
"grad_norm": 0.1994228195418579, |
|
"learning_rate": 3.7976778452376966e-06, |
|
"loss": 0.5121, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.781462971376647, |
|
"grad_norm": 0.19106219581068892, |
|
"learning_rate": 3.7677394751177957e-06, |
|
"loss": 0.5014, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.7823716492503408, |
|
"grad_norm": 0.18060774785185574, |
|
"learning_rate": 3.7379026208829865e-06, |
|
"loss": 0.4961, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.7832803271240345, |
|
"grad_norm": 0.19089962854174491, |
|
"learning_rate": 3.708167552195346e-06, |
|
"loss": 0.5083, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.7841890049977283, |
|
"grad_norm": 0.24135358768565288, |
|
"learning_rate": 3.67853453779702e-06, |
|
"loss": 0.5083, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.7850976828714221, |
|
"grad_norm": 0.1971222558984969, |
|
"learning_rate": 3.6490038455077933e-06, |
|
"loss": 0.4935, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.7860063607451159, |
|
"grad_norm": 0.19382482178376545, |
|
"learning_rate": 3.619575742222695e-06, |
|
"loss": 0.4829, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.7869150386188096, |
|
"grad_norm": 0.19118227592361328, |
|
"learning_rate": 3.5902504939095444e-06, |
|
"loss": 0.4944, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.7878237164925034, |
|
"grad_norm": 0.18091993720611124, |
|
"learning_rate": 3.561028365606588e-06, |
|
"loss": 0.5216, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.7887323943661971, |
|
"grad_norm": 0.175795228137756, |
|
"learning_rate": 3.5319096214200674e-06, |
|
"loss": 0.4925, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.789641072239891, |
|
"grad_norm": 0.18902176692875, |
|
"learning_rate": 3.5028945245218705e-06, |
|
"loss": 0.5055, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.7905497501135847, |
|
"grad_norm": 0.18848539499883823, |
|
"learning_rate": 3.473983337147118e-06, |
|
"loss": 0.5096, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.7914584279872785, |
|
"grad_norm": 0.17412933715596032, |
|
"learning_rate": 3.445176320591806e-06, |
|
"loss": 0.4939, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.7923671058609723, |
|
"grad_norm": 0.17984972956365003, |
|
"learning_rate": 3.4164737352104635e-06, |
|
"loss": 0.5103, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.793275783734666, |
|
"grad_norm": 0.17454380790489255, |
|
"learning_rate": 3.3878758404137627e-06, |
|
"loss": 0.5069, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.7941844616083599, |
|
"grad_norm": 0.17501932127393316, |
|
"learning_rate": 3.359382894666213e-06, |
|
"loss": 0.4994, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.7950931394820536, |
|
"grad_norm": 0.19096499205483844, |
|
"learning_rate": 3.3309951554837882e-06, |
|
"loss": 0.48, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.7960018173557474, |
|
"grad_norm": 0.18864955376852108, |
|
"learning_rate": 3.3027128794316353e-06, |
|
"loss": 0.509, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.7969104952294411, |
|
"grad_norm": 0.18909849882862032, |
|
"learning_rate": 3.274536322121723e-06, |
|
"loss": 0.525, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.797819173103135, |
|
"grad_norm": 0.18298262301252236, |
|
"learning_rate": 3.2464657382105484e-06, |
|
"loss": 0.508, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.7987278509768287, |
|
"grad_norm": 0.19031703336610836, |
|
"learning_rate": 3.2185013813968388e-06, |
|
"loss": 0.4913, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.7996365288505225, |
|
"grad_norm": 0.17991906333216653, |
|
"learning_rate": 3.1906435044192545e-06, |
|
"loss": 0.4957, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.8005452067242163, |
|
"grad_norm": 0.18203061589219746, |
|
"learning_rate": 3.162892359054098e-06, |
|
"loss": 0.5276, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.80145388459791, |
|
"grad_norm": 0.179165724080468, |
|
"learning_rate": 3.1352481961130382e-06, |
|
"loss": 0.4954, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.8023625624716039, |
|
"grad_norm": 0.19297597464700594, |
|
"learning_rate": 3.107711265440866e-06, |
|
"loss": 0.489, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.8032712403452976, |
|
"grad_norm": 0.17949577061893357, |
|
"learning_rate": 3.080281815913203e-06, |
|
"loss": 0.5056, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.8041799182189914, |
|
"grad_norm": 0.18672578901663653, |
|
"learning_rate": 3.0529600954342717e-06, |
|
"loss": 0.5215, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.8050885960926851, |
|
"grad_norm": 0.18219077965912273, |
|
"learning_rate": 3.02574635093466e-06, |
|
"loss": 0.4943, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.8059972739663789, |
|
"grad_norm": 0.1849602022616444, |
|
"learning_rate": 2.998640828369079e-06, |
|
"loss": 0.4999, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.8069059518400727, |
|
"grad_norm": 0.18588003223304814, |
|
"learning_rate": 2.9716437727141348e-06, |
|
"loss": 0.5125, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.8078146297137665, |
|
"grad_norm": 0.1828707964089993, |
|
"learning_rate": 2.9447554279661236e-06, |
|
"loss": 0.5237, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.8087233075874602, |
|
"grad_norm": 0.1834448257924201, |
|
"learning_rate": 2.9179760371388347e-06, |
|
"loss": 0.5028, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.809631985461154, |
|
"grad_norm": 0.1822724655343291, |
|
"learning_rate": 2.8913058422613363e-06, |
|
"loss": 0.4882, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.8105406633348478, |
|
"grad_norm": 0.22324245823221206, |
|
"learning_rate": 2.86474508437579e-06, |
|
"loss": 0.495, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.8114493412085416, |
|
"grad_norm": 0.1785989609888126, |
|
"learning_rate": 2.8382940035352913e-06, |
|
"loss": 0.5016, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.8123580190822354, |
|
"grad_norm": 0.182834626624704, |
|
"learning_rate": 2.811952838801685e-06, |
|
"loss": 0.4924, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.8132666969559291, |
|
"grad_norm": 0.1703850230922861, |
|
"learning_rate": 2.785721828243399e-06, |
|
"loss": 0.4995, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.8141753748296229, |
|
"grad_norm": 0.17624457325668033, |
|
"learning_rate": 2.7596012089333015e-06, |
|
"loss": 0.4966, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.8150840527033166, |
|
"grad_norm": 0.21894574439915565, |
|
"learning_rate": 2.733591216946568e-06, |
|
"loss": 0.4994, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.8159927305770105, |
|
"grad_norm": 0.1813886036339837, |
|
"learning_rate": 2.707692087358521e-06, |
|
"loss": 0.5029, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.8169014084507042, |
|
"grad_norm": 0.2152869543169518, |
|
"learning_rate": 2.681904054242537e-06, |
|
"loss": 0.4945, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.817810086324398, |
|
"grad_norm": 0.18222344404245855, |
|
"learning_rate": 2.6562273506679007e-06, |
|
"loss": 0.4936, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.8187187641980918, |
|
"grad_norm": 0.17607757385435371, |
|
"learning_rate": 2.6306622086977288e-06, |
|
"loss": 0.5062, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.8196274420717855, |
|
"grad_norm": 0.19606345598832053, |
|
"learning_rate": 2.6052088593868435e-06, |
|
"loss": 0.5172, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.8205361199454794, |
|
"grad_norm": 0.19039708237380526, |
|
"learning_rate": 2.5798675327796994e-06, |
|
"loss": 0.5086, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.8214447978191731, |
|
"grad_norm": 0.18246641597501811, |
|
"learning_rate": 2.554638457908314e-06, |
|
"loss": 0.5082, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.8223534756928669, |
|
"grad_norm": 0.18183943600553937, |
|
"learning_rate": 2.5295218627901695e-06, |
|
"loss": 0.5109, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.8232621535665606, |
|
"grad_norm": 0.17028064701481918, |
|
"learning_rate": 2.5045179744261864e-06, |
|
"loss": 0.504, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.8241708314402544, |
|
"grad_norm": 0.18673438138373802, |
|
"learning_rate": 2.4796270187986393e-06, |
|
"loss": 0.5253, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.8250795093139482, |
|
"grad_norm": 0.17265401886870885, |
|
"learning_rate": 2.454849220869142e-06, |
|
"loss": 0.4996, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.825988187187642, |
|
"grad_norm": 0.17469166767696584, |
|
"learning_rate": 2.430184804576594e-06, |
|
"loss": 0.5119, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.8268968650613358, |
|
"grad_norm": 0.18899123379640204, |
|
"learning_rate": 2.405633992835164e-06, |
|
"loss": 0.4904, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.8278055429350295, |
|
"grad_norm": 0.18294001642802674, |
|
"learning_rate": 2.38119700753228e-06, |
|
"loss": 0.5025, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.8287142208087233, |
|
"grad_norm": 0.17547004300065802, |
|
"learning_rate": 2.3568740695266246e-06, |
|
"loss": 0.5094, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.8296228986824171, |
|
"grad_norm": 0.1752961322019241, |
|
"learning_rate": 2.332665398646122e-06, |
|
"loss": 0.5171, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.8305315765561109, |
|
"grad_norm": 0.1869406585448319, |
|
"learning_rate": 2.308571213685967e-06, |
|
"loss": 0.5053, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.8314402544298046, |
|
"grad_norm": 0.17875534656100106, |
|
"learning_rate": 2.2845917324066522e-06, |
|
"loss": 0.4994, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.8323489323034984, |
|
"grad_norm": 0.18840384284937645, |
|
"learning_rate": 2.260727171531982e-06, |
|
"loss": 0.5187, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.8332576101771921, |
|
"grad_norm": 0.17687582668078677, |
|
"learning_rate": 2.2369777467471198e-06, |
|
"loss": 0.4915, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.834166288050886, |
|
"grad_norm": 0.18235210938136265, |
|
"learning_rate": 2.2133436726966555e-06, |
|
"loss": 0.5014, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.8350749659245797, |
|
"grad_norm": 0.18558447867998393, |
|
"learning_rate": 2.189825162982648e-06, |
|
"loss": 0.5052, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.8359836437982735, |
|
"grad_norm": 0.18104683947912312, |
|
"learning_rate": 2.166422430162693e-06, |
|
"loss": 0.4995, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.8368923216719673, |
|
"grad_norm": 0.18353267749631846, |
|
"learning_rate": 2.1431356857480076e-06, |
|
"loss": 0.4795, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.8378009995456611, |
|
"grad_norm": 0.19613272174120108, |
|
"learning_rate": 2.1199651402015284e-06, |
|
"loss": 0.5199, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.8387096774193549, |
|
"grad_norm": 0.19747489528791062, |
|
"learning_rate": 2.0969110029359922e-06, |
|
"loss": 0.5032, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.8396183552930486, |
|
"grad_norm": 0.1787049444368562, |
|
"learning_rate": 2.073973482312051e-06, |
|
"loss": 0.4967, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.8405270331667424, |
|
"grad_norm": 0.17606081747542848, |
|
"learning_rate": 2.0511527856363916e-06, |
|
"loss": 0.5041, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.8414357110404361, |
|
"grad_norm": 0.17829298424594497, |
|
"learning_rate": 2.028449119159862e-06, |
|
"loss": 0.5078, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.84234438891413, |
|
"grad_norm": 0.1948648000783994, |
|
"learning_rate": 2.0058626880756014e-06, |
|
"loss": 0.5072, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.8432530667878237, |
|
"grad_norm": 0.17927122485664945, |
|
"learning_rate": 1.9833936965171817e-06, |
|
"loss": 0.494, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.8441617446615175, |
|
"grad_norm": 0.1772461681235593, |
|
"learning_rate": 1.961042347556787e-06, |
|
"loss": 0.4923, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.8450704225352113, |
|
"grad_norm": 0.18305292727800654, |
|
"learning_rate": 1.9388088432033446e-06, |
|
"loss": 0.5261, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.845979100408905, |
|
"grad_norm": 0.1803832163167104, |
|
"learning_rate": 1.916693384400722e-06, |
|
"loss": 0.5219, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.8468877782825989, |
|
"grad_norm": 0.17898106251296597, |
|
"learning_rate": 1.8946961710259048e-06, |
|
"loss": 0.5111, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.8477964561562926, |
|
"grad_norm": 0.16947184822172653, |
|
"learning_rate": 1.872817401887197e-06, |
|
"loss": 0.5045, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.8487051340299864, |
|
"grad_norm": 0.17922642087683416, |
|
"learning_rate": 1.8510572747224024e-06, |
|
"loss": 0.4853, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.8496138119036801, |
|
"grad_norm": 0.17977834624667638, |
|
"learning_rate": 1.8294159861970605e-06, |
|
"loss": 0.5038, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.8505224897773739, |
|
"grad_norm": 0.19608404483438538, |
|
"learning_rate": 1.8078937319026655e-06, |
|
"loss": 0.5042, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.8514311676510677, |
|
"grad_norm": 0.1685912256631259, |
|
"learning_rate": 1.7864907063548813e-06, |
|
"loss": 0.5096, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.8523398455247615, |
|
"grad_norm": 0.17660218292868002, |
|
"learning_rate": 1.7652071029918082e-06, |
|
"loss": 0.4781, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.8532485233984552, |
|
"grad_norm": 0.18911622472242354, |
|
"learning_rate": 1.7440431141722152e-06, |
|
"loss": 0.5083, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.854157201272149, |
|
"grad_norm": 0.17455351806613617, |
|
"learning_rate": 1.7229989311738142e-06, |
|
"loss": 0.496, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.8550658791458428, |
|
"grad_norm": 0.1995844963464293, |
|
"learning_rate": 1.7020747441915184e-06, |
|
"loss": 0.502, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.8559745570195366, |
|
"grad_norm": 0.1668170199040863, |
|
"learning_rate": 1.6812707423357387e-06, |
|
"loss": 0.5095, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.8568832348932304, |
|
"grad_norm": 0.1948155057914624, |
|
"learning_rate": 1.660587113630656e-06, |
|
"loss": 0.4952, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.8577919127669241, |
|
"grad_norm": 0.18800602958995521, |
|
"learning_rate": 1.6400240450125486e-06, |
|
"loss": 0.4984, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.8587005906406179, |
|
"grad_norm": 0.17254706781068335, |
|
"learning_rate": 1.619581722328073e-06, |
|
"loss": 0.5151, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.8596092685143116, |
|
"grad_norm": 0.19375693939108612, |
|
"learning_rate": 1.5992603303325997e-06, |
|
"loss": 0.4976, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.8605179463880055, |
|
"grad_norm": 0.17558775095540655, |
|
"learning_rate": 1.579060052688548e-06, |
|
"loss": 0.5262, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.8614266242616992, |
|
"grad_norm": 0.17688437493103373, |
|
"learning_rate": 1.5589810719637148e-06, |
|
"loss": 0.507, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.862335302135393, |
|
"grad_norm": 0.18642075452138682, |
|
"learning_rate": 1.5390235696296268e-06, |
|
"loss": 0.4876, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.8632439800090868, |
|
"grad_norm": 0.17740686076054013, |
|
"learning_rate": 1.519187726059903e-06, |
|
"loss": 0.5048, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.8641526578827805, |
|
"grad_norm": 0.18919878445470903, |
|
"learning_rate": 1.4994737205286375e-06, |
|
"loss": 0.5152, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.8650613357564744, |
|
"grad_norm": 0.1907057801823801, |
|
"learning_rate": 1.47988173120875e-06, |
|
"loss": 0.5234, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.8659700136301681, |
|
"grad_norm": 0.1883206554410589, |
|
"learning_rate": 1.4604119351703953e-06, |
|
"loss": 0.5096, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.8668786915038619, |
|
"grad_norm": 0.17232570863058297, |
|
"learning_rate": 1.441064508379366e-06, |
|
"loss": 0.4783, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.8677873693775556, |
|
"grad_norm": 0.1762050568497747, |
|
"learning_rate": 1.421839625695492e-06, |
|
"loss": 0.4946, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.8686960472512494, |
|
"grad_norm": 0.1828489796685002, |
|
"learning_rate": 1.402737460871057e-06, |
|
"loss": 0.4925, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.8696047251249432, |
|
"grad_norm": 0.1874266302232557, |
|
"learning_rate": 1.3837581865492428e-06, |
|
"loss": 0.5129, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.870513402998637, |
|
"grad_norm": 0.17492796891040732, |
|
"learning_rate": 1.3649019742625624e-06, |
|
"loss": 0.4948, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.8714220808723308, |
|
"grad_norm": 0.17856469420360077, |
|
"learning_rate": 1.346168994431302e-06, |
|
"loss": 0.5047, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.8723307587460245, |
|
"grad_norm": 0.17735830612649864, |
|
"learning_rate": 1.3275594163619875e-06, |
|
"loss": 0.5119, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.8732394366197183, |
|
"grad_norm": 0.2000254861936507, |
|
"learning_rate": 1.3090734082458562e-06, |
|
"loss": 0.5315, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.8741481144934121, |
|
"grad_norm": 0.18755218675784746, |
|
"learning_rate": 1.2907111371573356e-06, |
|
"loss": 0.5117, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.8750567923671059, |
|
"grad_norm": 0.1924034058294698, |
|
"learning_rate": 1.2724727690525278e-06, |
|
"loss": 0.4963, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.8759654702407996, |
|
"grad_norm": 0.17199231210235086, |
|
"learning_rate": 1.2543584687677163e-06, |
|
"loss": 0.507, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.8768741481144934, |
|
"grad_norm": 0.195405734879678, |
|
"learning_rate": 1.2363684000178788e-06, |
|
"loss": 0.5061, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.8777828259881872, |
|
"grad_norm": 0.18129652277081798, |
|
"learning_rate": 1.2185027253951935e-06, |
|
"loss": 0.4902, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.878691503861881, |
|
"grad_norm": 0.17181670418876158, |
|
"learning_rate": 1.200761606367583e-06, |
|
"loss": 0.4705, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.8796001817355747, |
|
"grad_norm": 0.1782222124923922, |
|
"learning_rate": 1.1831452032772499e-06, |
|
"loss": 0.5054, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.8805088596092685, |
|
"grad_norm": 0.18034452027252415, |
|
"learning_rate": 1.1656536753392288e-06, |
|
"loss": 0.5068, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.8814175374829623, |
|
"grad_norm": 0.16897576149958596, |
|
"learning_rate": 1.148287180639952e-06, |
|
"loss": 0.5087, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.8823262153566561, |
|
"grad_norm": 0.17814376760510925, |
|
"learning_rate": 1.1310458761358057e-06, |
|
"loss": 0.5024, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.8832348932303499, |
|
"grad_norm": 0.1684038013793718, |
|
"learning_rate": 1.113929917651731e-06, |
|
"loss": 0.4988, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.8841435711040436, |
|
"grad_norm": 0.18680522710896053, |
|
"learning_rate": 1.0969394598798005e-06, |
|
"loss": 0.5146, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.8850522489777374, |
|
"grad_norm": 0.18089586831359786, |
|
"learning_rate": 1.0800746563778258e-06, |
|
"loss": 0.5051, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.8859609268514311, |
|
"grad_norm": 0.19317494093940246, |
|
"learning_rate": 1.0633356595679683e-06, |
|
"loss": 0.5025, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.886869604725125, |
|
"grad_norm": 0.27841178390301896, |
|
"learning_rate": 1.0467226207353675e-06, |
|
"loss": 0.4961, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.8877782825988187, |
|
"grad_norm": 0.1661933459945262, |
|
"learning_rate": 1.03023569002677e-06, |
|
"loss": 0.4928, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.8886869604725125, |
|
"grad_norm": 0.17343777056749088, |
|
"learning_rate": 1.0138750164491678e-06, |
|
"loss": 0.5091, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.8895956383462063, |
|
"grad_norm": 0.1889775529950286, |
|
"learning_rate": 9.976407478684518e-07, |
|
"loss": 0.4833, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.8905043162199, |
|
"grad_norm": 0.17817868099181625, |
|
"learning_rate": 9.815330310080889e-07, |
|
"loss": 0.4911, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.8914129940935939, |
|
"grad_norm": 0.16191917789368504, |
|
"learning_rate": 9.655520114477772e-07, |
|
"loss": 0.512, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.8923216719672876, |
|
"grad_norm": 0.18240957967746754, |
|
"learning_rate": 9.496978336221396e-07, |
|
"loss": 0.5045, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.8932303498409814, |
|
"grad_norm": 0.17017266671524892, |
|
"learning_rate": 9.339706408194288e-07, |
|
"loss": 0.4924, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.8941390277146751, |
|
"grad_norm": 0.17413211768706843, |
|
"learning_rate": 9.183705751802074e-07, |
|
"loss": 0.5049, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.8950477055883689, |
|
"grad_norm": 0.18605262758821256, |
|
"learning_rate": 9.028977776960873e-07, |
|
"loss": 0.5159, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.8959563834620627, |
|
"grad_norm": 0.19150551818628364, |
|
"learning_rate": 8.875523882084352e-07, |
|
"loss": 0.4868, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.8968650613357565, |
|
"grad_norm": 0.19088032763098828, |
|
"learning_rate": 8.723345454071308e-07, |
|
"loss": 0.5163, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.8977737392094502, |
|
"grad_norm": 0.16428972487904736, |
|
"learning_rate": 8.57244386829294e-07, |
|
"loss": 0.4949, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.898682417083144, |
|
"grad_norm": 0.16147474098681905, |
|
"learning_rate": 8.422820488580462e-07, |
|
"loss": 0.5111, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.8995910949568378, |
|
"grad_norm": 0.17877177293244242, |
|
"learning_rate": 8.274476667212933e-07, |
|
"loss": 0.4807, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.9004997728305316, |
|
"grad_norm": 0.17023754590293025, |
|
"learning_rate": 8.127413744904805e-07, |
|
"loss": 0.5124, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.9014084507042254, |
|
"grad_norm": 0.17706509947479346, |
|
"learning_rate": 7.981633050793929e-07, |
|
"loss": 0.4979, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.9023171285779191, |
|
"grad_norm": 0.18940926624622828, |
|
"learning_rate": 7.837135902429509e-07, |
|
"loss": 0.5098, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.9032258064516129, |
|
"grad_norm": 0.1689888104493502, |
|
"learning_rate": 7.693923605760294e-07, |
|
"loss": 0.511, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.9041344843253066, |
|
"grad_norm": 0.1736769118973061, |
|
"learning_rate": 7.551997455122573e-07, |
|
"loss": 0.4832, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.9050431621990005, |
|
"grad_norm": 0.16648584513031078, |
|
"learning_rate": 7.411358733228679e-07, |
|
"loss": 0.5054, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.9059518400726942, |
|
"grad_norm": 0.181474553011442, |
|
"learning_rate": 7.272008711155292e-07, |
|
"loss": 0.5086, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.906860517946388, |
|
"grad_norm": 0.17983163521623255, |
|
"learning_rate": 7.133948648331956e-07, |
|
"loss": 0.5099, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.9077691958200818, |
|
"grad_norm": 0.16423253255618706, |
|
"learning_rate": 6.997179792529724e-07, |
|
"loss": 0.4908, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.9086778736937755, |
|
"grad_norm": 0.17029107112544759, |
|
"learning_rate": 6.861703379849815e-07, |
|
"loss": 0.4954, |
|
"step": 1000 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1100, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 200, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 976475935997952.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|