{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.6666666666666666, |
|
"eval_steps": 500, |
|
"global_step": 314, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0021231422505307855, |
|
"grad_norm": 6.233692311689662, |
|
"learning_rate": 0.0, |
|
"loss": 1.3677, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.004246284501061571, |
|
"grad_norm": 6.03613913611666, |
|
"learning_rate": 2.0833333333333333e-07, |
|
"loss": 1.4092, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.006369426751592357, |
|
"grad_norm": 5.843878249582451, |
|
"learning_rate": 4.1666666666666667e-07, |
|
"loss": 1.4005, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.008492569002123142, |
|
"grad_norm": 6.2100854445399305, |
|
"learning_rate": 6.25e-07, |
|
"loss": 1.4027, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.010615711252653927, |
|
"grad_norm": 5.700197750457959, |
|
"learning_rate": 8.333333333333333e-07, |
|
"loss": 1.3859, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.012738853503184714, |
|
"grad_norm": 5.708735743112818, |
|
"learning_rate": 1.0416666666666667e-06, |
|
"loss": 1.4135, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.014861995753715499, |
|
"grad_norm": 5.584947016091844, |
|
"learning_rate": 1.25e-06, |
|
"loss": 1.3083, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.016985138004246284, |
|
"grad_norm": 5.5017514099891125, |
|
"learning_rate": 1.4583333333333335e-06, |
|
"loss": 1.3761, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01910828025477707, |
|
"grad_norm": 5.240707118749176, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 1.374, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.021231422505307854, |
|
"grad_norm": 5.159923266283754, |
|
"learning_rate": 1.8750000000000003e-06, |
|
"loss": 1.3837, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02335456475583864, |
|
"grad_norm": 4.23439034643152, |
|
"learning_rate": 2.0833333333333334e-06, |
|
"loss": 1.3278, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.025477707006369428, |
|
"grad_norm": 4.23403012426509, |
|
"learning_rate": 2.2916666666666666e-06, |
|
"loss": 1.3095, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.027600849256900213, |
|
"grad_norm": 4.033469595815775, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.3736, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.029723991507430998, |
|
"grad_norm": 3.4140671410294092, |
|
"learning_rate": 2.7083333333333334e-06, |
|
"loss": 1.2966, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03184713375796178, |
|
"grad_norm": 3.2252832240083946, |
|
"learning_rate": 2.916666666666667e-06, |
|
"loss": 1.3211, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03397027600849257, |
|
"grad_norm": 2.958759736658132, |
|
"learning_rate": 3.125e-06, |
|
"loss": 1.284, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.036093418259023353, |
|
"grad_norm": 3.0082219342870764, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 1.2574, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03821656050955414, |
|
"grad_norm": 2.7607212501829186, |
|
"learning_rate": 3.5416666666666673e-06, |
|
"loss": 1.2222, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.040339702760084924, |
|
"grad_norm": 1.7794382970826705, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 1.1952, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.04246284501061571, |
|
"grad_norm": 1.9000490101749923, |
|
"learning_rate": 3.958333333333333e-06, |
|
"loss": 1.0968, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.044585987261146494, |
|
"grad_norm": 2.184749467485857, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 1.2058, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04670912951167728, |
|
"grad_norm": 2.076583497979509, |
|
"learning_rate": 4.3750000000000005e-06, |
|
"loss": 1.1726, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04883227176220807, |
|
"grad_norm": 1.6762408968746727, |
|
"learning_rate": 4.583333333333333e-06, |
|
"loss": 1.0816, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.050955414012738856, |
|
"grad_norm": 1.655763485736625, |
|
"learning_rate": 4.791666666666668e-06, |
|
"loss": 1.0881, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.05307855626326964, |
|
"grad_norm": 1.404756822747019, |
|
"learning_rate": 5e-06, |
|
"loss": 1.111, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.055201698513800426, |
|
"grad_norm": 1.7450925608231072, |
|
"learning_rate": 4.999938256261134e-06, |
|
"loss": 1.0968, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05732484076433121, |
|
"grad_norm": 1.7455723981125268, |
|
"learning_rate": 4.999753028094368e-06, |
|
"loss": 1.135, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.059447983014861996, |
|
"grad_norm": 1.765180118972071, |
|
"learning_rate": 4.999444324649045e-06, |
|
"loss": 1.1332, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.06157112526539278, |
|
"grad_norm": 1.7025986108773807, |
|
"learning_rate": 4.99901216117357e-06, |
|
"loss": 1.1037, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.06369426751592357, |
|
"grad_norm": 1.6843781660517996, |
|
"learning_rate": 4.998456559014653e-06, |
|
"loss": 1.1036, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06581740976645435, |
|
"grad_norm": 1.4971568544476521, |
|
"learning_rate": 4.997777545616258e-06, |
|
"loss": 1.0896, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.06794055201698514, |
|
"grad_norm": 1.2471663073080574, |
|
"learning_rate": 4.996975154518245e-06, |
|
"loss": 1.0816, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.07006369426751592, |
|
"grad_norm": 1.1099200791707107, |
|
"learning_rate": 4.996049425354717e-06, |
|
"loss": 1.0408, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.07218683651804671, |
|
"grad_norm": 1.1596577803643031, |
|
"learning_rate": 4.995000403852057e-06, |
|
"loss": 1.0348, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.07430997876857749, |
|
"grad_norm": 0.9836703479205239, |
|
"learning_rate": 4.993828141826672e-06, |
|
"loss": 1.0397, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.07643312101910828, |
|
"grad_norm": 4.973840678082251, |
|
"learning_rate": 4.992532697182434e-06, |
|
"loss": 1.0692, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.07855626326963906, |
|
"grad_norm": 0.9322137357537782, |
|
"learning_rate": 4.991114133907822e-06, |
|
"loss": 1.0056, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.08067940552016985, |
|
"grad_norm": 1.0203537152829516, |
|
"learning_rate": 4.989572522072753e-06, |
|
"loss": 0.9956, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.08280254777070063, |
|
"grad_norm": 1.0385035368583873, |
|
"learning_rate": 4.9879079378251325e-06, |
|
"loss": 0.9772, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.08492569002123142, |
|
"grad_norm": 0.9097103872410344, |
|
"learning_rate": 4.986120463387084e-06, |
|
"loss": 1.0256, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0870488322717622, |
|
"grad_norm": 1.0611463152500384, |
|
"learning_rate": 4.984210187050891e-06, |
|
"loss": 0.9883, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.08917197452229299, |
|
"grad_norm": 0.779249244150645, |
|
"learning_rate": 4.982177203174636e-06, |
|
"loss": 0.9847, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.09129511677282377, |
|
"grad_norm": 0.7658008659883724, |
|
"learning_rate": 4.9800216121775404e-06, |
|
"loss": 0.9829, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.09341825902335456, |
|
"grad_norm": 0.7598772913633557, |
|
"learning_rate": 4.977743520535001e-06, |
|
"loss": 1.0496, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.09554140127388536, |
|
"grad_norm": 0.8141928977386432, |
|
"learning_rate": 4.975343040773335e-06, |
|
"loss": 1.0519, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.09766454352441614, |
|
"grad_norm": 0.8911722172701033, |
|
"learning_rate": 4.972820291464219e-06, |
|
"loss": 1.0345, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.09978768577494693, |
|
"grad_norm": 0.8358128773624875, |
|
"learning_rate": 4.970175397218832e-06, |
|
"loss": 0.9987, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.10191082802547771, |
|
"grad_norm": 0.882046205562171, |
|
"learning_rate": 4.967408488681702e-06, |
|
"loss": 1.0004, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.1040339702760085, |
|
"grad_norm": 0.8308643665449907, |
|
"learning_rate": 4.964519702524251e-06, |
|
"loss": 0.9867, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.10615711252653928, |
|
"grad_norm": 0.8727814847927396, |
|
"learning_rate": 4.9615091814380465e-06, |
|
"loss": 1.0236, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.10828025477707007, |
|
"grad_norm": 0.8128090079858757, |
|
"learning_rate": 4.958377074127751e-06, |
|
"loss": 1.0096, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.11040339702760085, |
|
"grad_norm": 0.7041201796080805, |
|
"learning_rate": 4.955123535303775e-06, |
|
"loss": 1.0161, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.11252653927813164, |
|
"grad_norm": 0.7034976999278052, |
|
"learning_rate": 4.951748725674643e-06, |
|
"loss": 0.9444, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.11464968152866242, |
|
"grad_norm": 0.7111287475587301, |
|
"learning_rate": 4.948252811939044e-06, |
|
"loss": 1.0034, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.11677282377919321, |
|
"grad_norm": 0.7112458705577068, |
|
"learning_rate": 4.944635966777607e-06, |
|
"loss": 1.0413, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.11889596602972399, |
|
"grad_norm": 0.7351439597774859, |
|
"learning_rate": 4.940898368844366e-06, |
|
"loss": 1.0663, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.12101910828025478, |
|
"grad_norm": 0.7120598412888705, |
|
"learning_rate": 4.937040202757937e-06, |
|
"loss": 1.0058, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.12314225053078556, |
|
"grad_norm": 0.8009131327695405, |
|
"learning_rate": 4.933061659092401e-06, |
|
"loss": 1.0048, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.12526539278131635, |
|
"grad_norm": 0.8355353984744548, |
|
"learning_rate": 4.928962934367887e-06, |
|
"loss": 1.0005, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.12738853503184713, |
|
"grad_norm": 0.7292500219199575, |
|
"learning_rate": 4.924744231040865e-06, |
|
"loss": 0.9948, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.12951167728237792, |
|
"grad_norm": 0.7441720822069011, |
|
"learning_rate": 4.920405757494147e-06, |
|
"loss": 0.9203, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1316348195329087, |
|
"grad_norm": 0.7164757410132886, |
|
"learning_rate": 4.915947728026599e-06, |
|
"loss": 0.9826, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.1337579617834395, |
|
"grad_norm": 0.6688811212861865, |
|
"learning_rate": 4.911370362842544e-06, |
|
"loss": 1.0041, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.13588110403397027, |
|
"grad_norm": 0.7276933998413905, |
|
"learning_rate": 4.906673888040895e-06, |
|
"loss": 1.0254, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.13800424628450106, |
|
"grad_norm": 0.8980292377631762, |
|
"learning_rate": 4.901858535603982e-06, |
|
"loss": 1.0341, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.14012738853503184, |
|
"grad_norm": 0.7213457650159387, |
|
"learning_rate": 4.896924543386099e-06, |
|
"loss": 0.9732, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.14225053078556263, |
|
"grad_norm": 0.6575608860005288, |
|
"learning_rate": 4.891872155101746e-06, |
|
"loss": 0.9758, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.14437367303609341, |
|
"grad_norm": 0.7227626661588851, |
|
"learning_rate": 4.886701620313595e-06, |
|
"loss": 0.9736, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.1464968152866242, |
|
"grad_norm": 0.7820534177881011, |
|
"learning_rate": 4.88141319442017e-06, |
|
"loss": 0.9519, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.14861995753715498, |
|
"grad_norm": 0.7464763101124039, |
|
"learning_rate": 4.876007138643216e-06, |
|
"loss": 1.0623, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.15074309978768577, |
|
"grad_norm": 0.8031125528787094, |
|
"learning_rate": 4.870483720014814e-06, |
|
"loss": 1.0023, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.15286624203821655, |
|
"grad_norm": 0.7252756266115659, |
|
"learning_rate": 4.8648432113641765e-06, |
|
"loss": 1.025, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.15498938428874734, |
|
"grad_norm": 0.7685738411441779, |
|
"learning_rate": 4.8590858913041775e-06, |
|
"loss": 0.9701, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.15711252653927812, |
|
"grad_norm": 0.7119059019537681, |
|
"learning_rate": 4.853212044217591e-06, |
|
"loss": 0.9471, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.1592356687898089, |
|
"grad_norm": 0.670302973142834, |
|
"learning_rate": 4.847221960243042e-06, |
|
"loss": 0.9918, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1613588110403397, |
|
"grad_norm": 0.7274878719799441, |
|
"learning_rate": 4.8411159352606735e-06, |
|
"loss": 0.9114, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.16348195329087048, |
|
"grad_norm": 0.6706478978040786, |
|
"learning_rate": 4.834894270877536e-06, |
|
"loss": 0.9628, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.16560509554140126, |
|
"grad_norm": 0.7197495557043765, |
|
"learning_rate": 4.828557274412686e-06, |
|
"loss": 0.9792, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.16772823779193205, |
|
"grad_norm": 0.6740897649139851, |
|
"learning_rate": 4.822105258882007e-06, |
|
"loss": 0.978, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.16985138004246284, |
|
"grad_norm": 0.7298858435711825, |
|
"learning_rate": 4.815538542982751e-06, |
|
"loss": 1.0749, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.17197452229299362, |
|
"grad_norm": 0.7437812096942298, |
|
"learning_rate": 4.808857451077788e-06, |
|
"loss": 1.0027, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1740976645435244, |
|
"grad_norm": 0.7793372891907893, |
|
"learning_rate": 4.802062313179595e-06, |
|
"loss": 1.056, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1762208067940552, |
|
"grad_norm": 0.7200522013491976, |
|
"learning_rate": 4.795153464933948e-06, |
|
"loss": 0.9804, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.17834394904458598, |
|
"grad_norm": 0.6800845586517711, |
|
"learning_rate": 4.7881312476033444e-06, |
|
"loss": 1.0096, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.18046709129511676, |
|
"grad_norm": 0.6876028206701021, |
|
"learning_rate": 4.7809960080501464e-06, |
|
"loss": 0.9886, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.18259023354564755, |
|
"grad_norm": 0.7119685223963281, |
|
"learning_rate": 4.773748098719448e-06, |
|
"loss": 1.019, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.18471337579617833, |
|
"grad_norm": 0.6948951583741163, |
|
"learning_rate": 4.766387877621667e-06, |
|
"loss": 1.0177, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.18683651804670912, |
|
"grad_norm": 0.7252384478947084, |
|
"learning_rate": 4.758915708314858e-06, |
|
"loss": 0.952, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.18895966029723993, |
|
"grad_norm": 0.7132363877921689, |
|
"learning_rate": 4.751331959886758e-06, |
|
"loss": 0.9311, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.1910828025477707, |
|
"grad_norm": 0.6672381978657935, |
|
"learning_rate": 4.743637006936552e-06, |
|
"loss": 1.0019, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1932059447983015, |
|
"grad_norm": 0.7074654491248366, |
|
"learning_rate": 4.735831229556374e-06, |
|
"loss": 0.9249, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.19532908704883228, |
|
"grad_norm": 0.8263034494745758, |
|
"learning_rate": 4.727915013312527e-06, |
|
"loss": 0.9627, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.19745222929936307, |
|
"grad_norm": 0.7564665006141713, |
|
"learning_rate": 4.719888749226442e-06, |
|
"loss": 0.9788, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.19957537154989385, |
|
"grad_norm": 0.7401712745437141, |
|
"learning_rate": 4.711752833755362e-06, |
|
"loss": 0.9595, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.20169851380042464, |
|
"grad_norm": 0.7177547311512174, |
|
"learning_rate": 4.70350766877276e-06, |
|
"loss": 1.0125, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.20382165605095542, |
|
"grad_norm": 0.8365049197183004, |
|
"learning_rate": 4.695153661548486e-06, |
|
"loss": 0.9617, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.2059447983014862, |
|
"grad_norm": 0.8275524843143772, |
|
"learning_rate": 4.686691224728652e-06, |
|
"loss": 0.9583, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.208067940552017, |
|
"grad_norm": 0.7849897218743559, |
|
"learning_rate": 4.678120776315251e-06, |
|
"loss": 0.9432, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.21019108280254778, |
|
"grad_norm": 0.7266338220692218, |
|
"learning_rate": 4.669442739645506e-06, |
|
"loss": 1.0112, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.21231422505307856, |
|
"grad_norm": 0.6786209555674868, |
|
"learning_rate": 4.660657543370958e-06, |
|
"loss": 1.0122, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.21443736730360935, |
|
"grad_norm": 0.7396143819036078, |
|
"learning_rate": 4.651765621436303e-06, |
|
"loss": 1.0491, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.21656050955414013, |
|
"grad_norm": 0.745708791916274, |
|
"learning_rate": 4.642767413057942e-06, |
|
"loss": 0.966, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.21868365180467092, |
|
"grad_norm": 0.7477607378275345, |
|
"learning_rate": 4.6336633627023e-06, |
|
"loss": 0.9129, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.2208067940552017, |
|
"grad_norm": 0.6930283267824098, |
|
"learning_rate": 4.624453920063863e-06, |
|
"loss": 0.9769, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.2229299363057325, |
|
"grad_norm": 0.6762283490594463, |
|
"learning_rate": 4.6151395400429665e-06, |
|
"loss": 0.9383, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.22505307855626328, |
|
"grad_norm": 0.7313509583041526, |
|
"learning_rate": 4.605720682723331e-06, |
|
"loss": 0.9428, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.22717622080679406, |
|
"grad_norm": 0.7156367191749474, |
|
"learning_rate": 4.596197813349328e-06, |
|
"loss": 0.9834, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.22929936305732485, |
|
"grad_norm": 0.6828464400232375, |
|
"learning_rate": 4.586571402303006e-06, |
|
"loss": 0.9504, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.23142250530785563, |
|
"grad_norm": 0.74716164026175, |
|
"learning_rate": 4.576841925080853e-06, |
|
"loss": 0.9998, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.23354564755838642, |
|
"grad_norm": 0.6856088912154495, |
|
"learning_rate": 4.56700986227031e-06, |
|
"loss": 1.0149, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.2356687898089172, |
|
"grad_norm": 0.7033606189520333, |
|
"learning_rate": 4.557075699526032e-06, |
|
"loss": 0.9584, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.23779193205944799, |
|
"grad_norm": 0.7043436392355145, |
|
"learning_rate": 4.547039927545899e-06, |
|
"loss": 0.9779, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.23991507430997877, |
|
"grad_norm": 0.8202453116913221, |
|
"learning_rate": 4.536903042046778e-06, |
|
"loss": 0.9505, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.24203821656050956, |
|
"grad_norm": 0.7436505838525592, |
|
"learning_rate": 4.526665543740038e-06, |
|
"loss": 0.9561, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.24416135881104034, |
|
"grad_norm": 0.6478361281734111, |
|
"learning_rate": 4.516327938306818e-06, |
|
"loss": 1.0084, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.24628450106157113, |
|
"grad_norm": 0.7674555846640165, |
|
"learning_rate": 4.505890736373045e-06, |
|
"loss": 0.9767, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2484076433121019, |
|
"grad_norm": 0.7184796384578326, |
|
"learning_rate": 4.495354453484216e-06, |
|
"loss": 0.9801, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.2505307855626327, |
|
"grad_norm": 0.677609956598202, |
|
"learning_rate": 4.4847196100799305e-06, |
|
"loss": 0.9825, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2526539278131635, |
|
"grad_norm": 0.7123651122668029, |
|
"learning_rate": 4.473986731468183e-06, |
|
"loss": 1.0107, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.25477707006369427, |
|
"grad_norm": 0.7156231222559138, |
|
"learning_rate": 4.463156347799419e-06, |
|
"loss": 1.0779, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.25690021231422505, |
|
"grad_norm": 0.7717775840138449, |
|
"learning_rate": 4.452228994040341e-06, |
|
"loss": 0.9622, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.25902335456475584, |
|
"grad_norm": 0.7512072014948016, |
|
"learning_rate": 4.4412052099474916e-06, |
|
"loss": 0.9625, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2611464968152866, |
|
"grad_norm": 0.7124270062225315, |
|
"learning_rate": 4.430085540040587e-06, |
|
"loss": 1.0169, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.2632696390658174, |
|
"grad_norm": 0.7058601891740411, |
|
"learning_rate": 4.418870533575626e-06, |
|
"loss": 0.9819, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.2653927813163482, |
|
"grad_norm": 0.6903185636770165, |
|
"learning_rate": 4.40756074451775e-06, |
|
"loss": 0.9398, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.267515923566879, |
|
"grad_norm": 0.7046682682406727, |
|
"learning_rate": 4.396156731513888e-06, |
|
"loss": 1.0341, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.26963906581740976, |
|
"grad_norm": 0.7862567299960644, |
|
"learning_rate": 4.384659057865165e-06, |
|
"loss": 0.947, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.27176220806794055, |
|
"grad_norm": 0.6195482427854749, |
|
"learning_rate": 4.373068291499065e-06, |
|
"loss": 0.8899, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.27388535031847133, |
|
"grad_norm": 0.6985398850512, |
|
"learning_rate": 4.36138500494139e-06, |
|
"loss": 0.9726, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.2760084925690021, |
|
"grad_norm": 0.668756553679393, |
|
"learning_rate": 4.349609775287977e-06, |
|
"loss": 0.9855, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.2781316348195329, |
|
"grad_norm": 0.6581795928554366, |
|
"learning_rate": 4.337743184176188e-06, |
|
"loss": 0.9596, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.2802547770700637, |
|
"grad_norm": 0.6337202167292365, |
|
"learning_rate": 4.325785817756186e-06, |
|
"loss": 0.9839, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.2823779193205945, |
|
"grad_norm": 0.6680059124711757, |
|
"learning_rate": 4.313738266661979e-06, |
|
"loss": 0.9324, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.28450106157112526, |
|
"grad_norm": 0.6598271036519225, |
|
"learning_rate": 4.301601125982246e-06, |
|
"loss": 1.0066, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.28662420382165604, |
|
"grad_norm": 0.7183648150650218, |
|
"learning_rate": 4.289374995230942e-06, |
|
"loss": 1.0136, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.28874734607218683, |
|
"grad_norm": 0.742221341756798, |
|
"learning_rate": 4.277060478317687e-06, |
|
"loss": 1.0008, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.2908704883227176, |
|
"grad_norm": 0.6824304036868553, |
|
"learning_rate": 4.264658183517935e-06, |
|
"loss": 0.9381, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.2929936305732484, |
|
"grad_norm": 0.6505573173225403, |
|
"learning_rate": 4.252168723442927e-06, |
|
"loss": 0.9489, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.2951167728237792, |
|
"grad_norm": 0.6950634835305272, |
|
"learning_rate": 4.23959271500943e-06, |
|
"loss": 0.9373, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.29723991507430997, |
|
"grad_norm": 0.6872675526003413, |
|
"learning_rate": 4.226930779409271e-06, |
|
"loss": 0.9984, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.29936305732484075, |
|
"grad_norm": 0.7693317009374261, |
|
"learning_rate": 4.214183542078646e-06, |
|
"loss": 0.9391, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.30148619957537154, |
|
"grad_norm": 0.6809609221970698, |
|
"learning_rate": 4.201351632667227e-06, |
|
"loss": 1.0107, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.3036093418259023, |
|
"grad_norm": 0.6776804857260985, |
|
"learning_rate": 4.1884356850070695e-06, |
|
"loss": 1.0316, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.3057324840764331, |
|
"grad_norm": 0.6858474793512133, |
|
"learning_rate": 4.175436337081289e-06, |
|
"loss": 0.9071, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.3078556263269639, |
|
"grad_norm": 0.7262200113022307, |
|
"learning_rate": 4.162354230992562e-06, |
|
"loss": 0.9751, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.3099787685774947, |
|
"grad_norm": 0.7048851306107318, |
|
"learning_rate": 4.149190012931402e-06, |
|
"loss": 0.9222, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.31210191082802546, |
|
"grad_norm": 0.7661302534267068, |
|
"learning_rate": 4.135944333144244e-06, |
|
"loss": 0.9493, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.31422505307855625, |
|
"grad_norm": 0.6647279751401796, |
|
"learning_rate": 4.122617845901322e-06, |
|
"loss": 1.0373, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.31634819532908703, |
|
"grad_norm": 0.677884233878186, |
|
"learning_rate": 4.109211209464354e-06, |
|
"loss": 1.0299, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.3184713375796178, |
|
"grad_norm": 0.6808985503614562, |
|
"learning_rate": 4.095725086054029e-06, |
|
"loss": 0.9564, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3205944798301486, |
|
"grad_norm": 0.6844572260666089, |
|
"learning_rate": 4.0821601418172926e-06, |
|
"loss": 0.9942, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.3227176220806794, |
|
"grad_norm": 0.687636477644006, |
|
"learning_rate": 4.068517046794443e-06, |
|
"loss": 0.9925, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.3248407643312102, |
|
"grad_norm": 0.7121462034889847, |
|
"learning_rate": 4.054796474886038e-06, |
|
"loss": 1.0232, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.32696390658174096, |
|
"grad_norm": 0.6791139957154484, |
|
"learning_rate": 4.040999103819606e-06, |
|
"loss": 1.0147, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.32908704883227174, |
|
"grad_norm": 0.6317316423507299, |
|
"learning_rate": 4.0271256151161666e-06, |
|
"loss": 1.0073, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.33121019108280253, |
|
"grad_norm": 0.7021672955838567, |
|
"learning_rate": 4.013176694056571e-06, |
|
"loss": 0.9892, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 0.6535581508427883, |
|
"learning_rate": 3.999153029647651e-06, |
|
"loss": 1.0406, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.3354564755838641, |
|
"grad_norm": 0.6343047828611024, |
|
"learning_rate": 3.985055314588185e-06, |
|
"loss": 0.9452, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.3375796178343949, |
|
"grad_norm": 0.7092653110810674, |
|
"learning_rate": 3.970884245234684e-06, |
|
"loss": 1.0262, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.33970276008492567, |
|
"grad_norm": 0.7474243693562463, |
|
"learning_rate": 3.956640521566989e-06, |
|
"loss": 0.9584, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.34182590233545646, |
|
"grad_norm": 0.6647251802270744, |
|
"learning_rate": 3.942324847153706e-06, |
|
"loss": 0.9247, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.34394904458598724, |
|
"grad_norm": 0.6878800859406885, |
|
"learning_rate": 3.927937929117447e-06, |
|
"loss": 0.9674, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.346072186836518, |
|
"grad_norm": 0.7194337473522074, |
|
"learning_rate": 3.913480478099898e-06, |
|
"loss": 0.9727, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.3481953290870488, |
|
"grad_norm": 0.7643905012788286, |
|
"learning_rate": 3.898953208226723e-06, |
|
"loss": 1.0143, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.3503184713375796, |
|
"grad_norm": 0.6615757470184159, |
|
"learning_rate": 3.884356837072289e-06, |
|
"loss": 1.0199, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.3524416135881104, |
|
"grad_norm": 0.6594351684335442, |
|
"learning_rate": 3.869692085624218e-06, |
|
"loss": 0.929, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.35456475583864117, |
|
"grad_norm": 0.7235028424658448, |
|
"learning_rate": 3.8549596782477775e-06, |
|
"loss": 0.9624, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.35668789808917195, |
|
"grad_norm": 0.6204087859526352, |
|
"learning_rate": 3.8401603426501e-06, |
|
"loss": 1.0118, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.35881104033970274, |
|
"grad_norm": 0.6873006374808731, |
|
"learning_rate": 3.825294809844234e-06, |
|
"loss": 0.9837, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.3609341825902335, |
|
"grad_norm": 0.7107317410899462, |
|
"learning_rate": 3.8103638141130406e-06, |
|
"loss": 0.9185, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3630573248407643, |
|
"grad_norm": 0.6549533824581767, |
|
"learning_rate": 3.7953680929729215e-06, |
|
"loss": 0.9547, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.3651804670912951, |
|
"grad_norm": 0.6914578323464019, |
|
"learning_rate": 3.7803083871373876e-06, |
|
"loss": 0.9434, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.3673036093418259, |
|
"grad_norm": 0.6752976153646614, |
|
"learning_rate": 3.7651854404804757e-06, |
|
"loss": 0.9398, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.36942675159235666, |
|
"grad_norm": 0.6817081041020392, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 0.981, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.37154989384288745, |
|
"grad_norm": 0.6695314283276965, |
|
"learning_rate": 3.734752815780659e-06, |
|
"loss": 0.9526, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.37367303609341823, |
|
"grad_norm": 0.6927497018483542, |
|
"learning_rate": 3.7194446409569813e-06, |
|
"loss": 0.9904, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.37579617834394907, |
|
"grad_norm": 0.736727316466936, |
|
"learning_rate": 3.7040762316761247e-06, |
|
"loss": 0.9487, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.37791932059447986, |
|
"grad_norm": 0.7074800907113514, |
|
"learning_rate": 3.6886483470605293e-06, |
|
"loss": 0.9896, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.38004246284501064, |
|
"grad_norm": 0.7739492910066014, |
|
"learning_rate": 3.6731617491704195e-06, |
|
"loss": 0.929, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3821656050955414, |
|
"grad_norm": 0.7085501959751563, |
|
"learning_rate": 3.6576172029661583e-06, |
|
"loss": 1.0187, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3842887473460722, |
|
"grad_norm": 0.6968943221383892, |
|
"learning_rate": 3.6420154762704685e-06, |
|
"loss": 0.9873, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.386411889596603, |
|
"grad_norm": 0.6799387175990909, |
|
"learning_rate": 3.6263573397305003e-06, |
|
"loss": 1.0038, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.3885350318471338, |
|
"grad_norm": 0.7111666054095594, |
|
"learning_rate": 3.6106435667797684e-06, |
|
"loss": 0.9405, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.39065817409766457, |
|
"grad_norm": 0.6690528674663481, |
|
"learning_rate": 3.5948749335999493e-06, |
|
"loss": 0.9196, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.39278131634819535, |
|
"grad_norm": 0.7229293413643434, |
|
"learning_rate": 3.5790522190825368e-06, |
|
"loss": 0.9907, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.39490445859872614, |
|
"grad_norm": 0.7327537558494182, |
|
"learning_rate": 3.5631762047903746e-06, |
|
"loss": 1.0238, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.3970276008492569, |
|
"grad_norm": 0.6338409213121795, |
|
"learning_rate": 3.5472476749190465e-06, |
|
"loss": 1.0138, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.3991507430997877, |
|
"grad_norm": 0.7381506943950502, |
|
"learning_rate": 3.531267416258144e-06, |
|
"loss": 1.0226, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.4012738853503185, |
|
"grad_norm": 0.7049163316810922, |
|
"learning_rate": 3.5152362181524013e-06, |
|
"loss": 0.936, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.4033970276008493, |
|
"grad_norm": 0.6826780949913842, |
|
"learning_rate": 3.4991548724627054e-06, |
|
"loss": 0.8847, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.40552016985138006, |
|
"grad_norm": 0.6878354292578048, |
|
"learning_rate": 3.4830241735269852e-06, |
|
"loss": 0.9456, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.40764331210191085, |
|
"grad_norm": 0.7027194510942952, |
|
"learning_rate": 3.4668449181209695e-06, |
|
"loss": 0.9483, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.40976645435244163, |
|
"grad_norm": 0.624138086953515, |
|
"learning_rate": 3.450617905418834e-06, |
|
"loss": 0.9822, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.4118895966029724, |
|
"grad_norm": 0.7219498766495058, |
|
"learning_rate": 3.434343936953729e-06, |
|
"loss": 0.9675, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.4140127388535032, |
|
"grad_norm": 0.6397951461970118, |
|
"learning_rate": 3.4180238165781806e-06, |
|
"loss": 0.9268, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.416135881104034, |
|
"grad_norm": 0.6918925648124641, |
|
"learning_rate": 3.4016583504243892e-06, |
|
"loss": 0.9176, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.4182590233545648, |
|
"grad_norm": 0.6466841175073968, |
|
"learning_rate": 3.38524834686441e-06, |
|
"loss": 0.9787, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.42038216560509556, |
|
"grad_norm": 0.89597220464602, |
|
"learning_rate": 3.3687946164702225e-06, |
|
"loss": 1.0087, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.42250530785562634, |
|
"grad_norm": 0.6621404227339525, |
|
"learning_rate": 3.3522979719736923e-06, |
|
"loss": 0.9418, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.42462845010615713, |
|
"grad_norm": 0.6591439083322573, |
|
"learning_rate": 3.3357592282264295e-06, |
|
"loss": 0.9602, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.4267515923566879, |
|
"grad_norm": 0.6574210701544694, |
|
"learning_rate": 3.319179202159532e-06, |
|
"loss": 0.9788, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.4288747346072187, |
|
"grad_norm": 0.6417630612223996, |
|
"learning_rate": 3.3025587127432414e-06, |
|
"loss": 0.9729, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.4309978768577495, |
|
"grad_norm": 0.6486686661277697, |
|
"learning_rate": 3.2858985809464823e-06, |
|
"loss": 0.9292, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.43312101910828027, |
|
"grad_norm": 0.707658030297379, |
|
"learning_rate": 3.2691996296963185e-06, |
|
"loss": 0.9609, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.43524416135881105, |
|
"grad_norm": 0.607355433415427, |
|
"learning_rate": 3.2524626838372976e-06, |
|
"loss": 1.0419, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.43736730360934184, |
|
"grad_norm": 0.6420567814286836, |
|
"learning_rate": 3.2356885700907096e-06, |
|
"loss": 0.95, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.4394904458598726, |
|
"grad_norm": 0.7003629480249687, |
|
"learning_rate": 3.218878117013756e-06, |
|
"loss": 0.958, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.4416135881104034, |
|
"grad_norm": 0.7685385917675293, |
|
"learning_rate": 3.202032154958615e-06, |
|
"loss": 0.9206, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.4437367303609342, |
|
"grad_norm": 0.656199294496736, |
|
"learning_rate": 3.1851515160314346e-06, |
|
"loss": 0.9196, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.445859872611465, |
|
"grad_norm": 0.6566912124054102, |
|
"learning_rate": 3.168237034051222e-06, |
|
"loss": 0.9828, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.44798301486199577, |
|
"grad_norm": 0.7017088990329898, |
|
"learning_rate": 3.151289544508664e-06, |
|
"loss": 0.9555, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.45010615711252655, |
|
"grad_norm": 0.6812153887234823, |
|
"learning_rate": 3.1343098845248564e-06, |
|
"loss": 0.9312, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.45222929936305734, |
|
"grad_norm": 0.7179386805807547, |
|
"learning_rate": 3.117298892809953e-06, |
|
"loss": 0.9316, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.4543524416135881, |
|
"grad_norm": 0.6877605246852548, |
|
"learning_rate": 3.100257409621738e-06, |
|
"loss": 0.9169, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.4564755838641189, |
|
"grad_norm": 0.6792629271222044, |
|
"learning_rate": 3.0831862767241206e-06, |
|
"loss": 0.9256, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.4585987261146497, |
|
"grad_norm": 0.7065974786302712, |
|
"learning_rate": 3.0660863373455594e-06, |
|
"loss": 0.974, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.4607218683651805, |
|
"grad_norm": 0.6680687298095636, |
|
"learning_rate": 3.0489584361374073e-06, |
|
"loss": 1.0146, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.46284501061571126, |
|
"grad_norm": 0.602133219295924, |
|
"learning_rate": 3.031803419132192e-06, |
|
"loss": 0.9495, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.46496815286624205, |
|
"grad_norm": 0.634234424637342, |
|
"learning_rate": 3.014622133701826e-06, |
|
"loss": 0.9312, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.46709129511677283, |
|
"grad_norm": 0.638306267326245, |
|
"learning_rate": 2.99741542851575e-06, |
|
"loss": 0.9509, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.4692144373673036, |
|
"grad_norm": 0.6406442189247321, |
|
"learning_rate": 2.9801841534990115e-06, |
|
"loss": 1.0191, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.4713375796178344, |
|
"grad_norm": 0.6190626628176242, |
|
"learning_rate": 2.96292915979029e-06, |
|
"loss": 0.8954, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.4734607218683652, |
|
"grad_norm": 0.6898377470965286, |
|
"learning_rate": 2.945651299699843e-06, |
|
"loss": 0.9275, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.47558386411889597, |
|
"grad_norm": 0.6613536340865172, |
|
"learning_rate": 2.9283514266674166e-06, |
|
"loss": 1.0029, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.47770700636942676, |
|
"grad_norm": 1.1441795755503248, |
|
"learning_rate": 2.9110303952200863e-06, |
|
"loss": 0.9334, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.47983014861995754, |
|
"grad_norm": 0.6957291992631914, |
|
"learning_rate": 2.893689060930045e-06, |
|
"loss": 0.9323, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4819532908704883, |
|
"grad_norm": 0.6915777420289582, |
|
"learning_rate": 2.876328280372346e-06, |
|
"loss": 0.9663, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.4840764331210191, |
|
"grad_norm": 0.658451448474378, |
|
"learning_rate": 2.8589489110825895e-06, |
|
"loss": 0.9655, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.4861995753715499, |
|
"grad_norm": 0.7130813908114916, |
|
"learning_rate": 2.8415518115145673e-06, |
|
"loss": 0.986, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.4883227176220807, |
|
"grad_norm": 0.6409022615953691, |
|
"learning_rate": 2.824137840997858e-06, |
|
"loss": 0.9579, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.49044585987261147, |
|
"grad_norm": 0.8843352828327378, |
|
"learning_rate": 2.8067078596953793e-06, |
|
"loss": 0.9156, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.49256900212314225, |
|
"grad_norm": 0.7332731936399352, |
|
"learning_rate": 2.7892627285609035e-06, |
|
"loss": 1.0487, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.49469214437367304, |
|
"grad_norm": 0.7087669563781219, |
|
"learning_rate": 2.7718033092965267e-06, |
|
"loss": 0.9214, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.4968152866242038, |
|
"grad_norm": 0.713075469680895, |
|
"learning_rate": 2.754330464310108e-06, |
|
"loss": 0.9048, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4989384288747346, |
|
"grad_norm": 0.7753522854989998, |
|
"learning_rate": 2.7368450566726714e-06, |
|
"loss": 0.9705, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.5010615711252654, |
|
"grad_norm": 0.6978893126662162, |
|
"learning_rate": 2.7193479500757684e-06, |
|
"loss": 0.8723, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.5031847133757962, |
|
"grad_norm": 0.7858515284602422, |
|
"learning_rate": 2.7018400087888265e-06, |
|
"loss": 0.9617, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.505307855626327, |
|
"grad_norm": 0.6654303217101978, |
|
"learning_rate": 2.684322097616448e-06, |
|
"loss": 0.99, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.5074309978768577, |
|
"grad_norm": 0.6505973880777469, |
|
"learning_rate": 2.6667950818556992e-06, |
|
"loss": 0.9332, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.5095541401273885, |
|
"grad_norm": 0.6514735895502181, |
|
"learning_rate": 2.649259827253368e-06, |
|
"loss": 1.009, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.5116772823779193, |
|
"grad_norm": 0.6589778667351245, |
|
"learning_rate": 2.6317171999631992e-06, |
|
"loss": 0.9445, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.5138004246284501, |
|
"grad_norm": 0.7442921312005703, |
|
"learning_rate": 2.6141680665031116e-06, |
|
"loss": 0.9081, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.5159235668789809, |
|
"grad_norm": 0.6624301541174796, |
|
"learning_rate": 2.5966132937123963e-06, |
|
"loss": 0.9145, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.5180467091295117, |
|
"grad_norm": 0.6691500566559083, |
|
"learning_rate": 2.5790537487088975e-06, |
|
"loss": 0.992, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.5201698513800425, |
|
"grad_norm": 0.6431407890721418, |
|
"learning_rate": 2.561490298846186e-06, |
|
"loss": 1.0049, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.5222929936305732, |
|
"grad_norm": 0.6886918135171396, |
|
"learning_rate": 2.5439238116707103e-06, |
|
"loss": 0.9867, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.524416135881104, |
|
"grad_norm": 0.7161610183514595, |
|
"learning_rate": 2.5263551548789494e-06, |
|
"loss": 0.9655, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.5265392781316348, |
|
"grad_norm": 0.6337123622638601, |
|
"learning_rate": 2.5087851962745468e-06, |
|
"loss": 0.9362, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.5286624203821656, |
|
"grad_norm": 0.6805442118508325, |
|
"learning_rate": 2.4912148037254536e-06, |
|
"loss": 0.9749, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.5307855626326964, |
|
"grad_norm": 0.6850319802840648, |
|
"learning_rate": 2.4736448451210514e-06, |
|
"loss": 0.988, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5329087048832272, |
|
"grad_norm": 0.8209419171009714, |
|
"learning_rate": 2.45607618832929e-06, |
|
"loss": 0.9403, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.535031847133758, |
|
"grad_norm": 0.7609613666487252, |
|
"learning_rate": 2.4385097011538144e-06, |
|
"loss": 0.9481, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.5371549893842887, |
|
"grad_norm": 0.6602663751489974, |
|
"learning_rate": 2.420946251291103e-06, |
|
"loss": 0.9706, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.5392781316348195, |
|
"grad_norm": 0.6202006180428027, |
|
"learning_rate": 2.4033867062876054e-06, |
|
"loss": 1.0119, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.5414012738853503, |
|
"grad_norm": 0.6644803301325269, |
|
"learning_rate": 2.3858319334968892e-06, |
|
"loss": 0.9325, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5435244161358811, |
|
"grad_norm": 1.1068638082429352, |
|
"learning_rate": 2.368282800036801e-06, |
|
"loss": 0.9264, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.5456475583864119, |
|
"grad_norm": 0.713607799195559, |
|
"learning_rate": 2.350740172746633e-06, |
|
"loss": 0.936, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.5477707006369427, |
|
"grad_norm": 0.6907351623740556, |
|
"learning_rate": 2.3332049181443016e-06, |
|
"loss": 0.9753, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.5498938428874734, |
|
"grad_norm": 0.7074500211398186, |
|
"learning_rate": 2.315677902383553e-06, |
|
"loss": 0.9484, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.5520169851380042, |
|
"grad_norm": 0.6258963310659448, |
|
"learning_rate": 2.298159991211174e-06, |
|
"loss": 0.9195, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.554140127388535, |
|
"grad_norm": 0.6940902471389512, |
|
"learning_rate": 2.280652049924232e-06, |
|
"loss": 0.9607, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.5562632696390658, |
|
"grad_norm": 0.6306810949067136, |
|
"learning_rate": 2.2631549433273294e-06, |
|
"loss": 1.0086, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.5583864118895966, |
|
"grad_norm": 0.6807622298097962, |
|
"learning_rate": 2.2456695356898915e-06, |
|
"loss": 0.8846, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.5605095541401274, |
|
"grad_norm": 0.6737682714342212, |
|
"learning_rate": 2.2281966907034737e-06, |
|
"loss": 0.9424, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.5626326963906582, |
|
"grad_norm": 0.6759877323774287, |
|
"learning_rate": 2.2107372714390973e-06, |
|
"loss": 0.9961, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.564755838641189, |
|
"grad_norm": 0.7062286704853831, |
|
"learning_rate": 2.193292140304621e-06, |
|
"loss": 0.9936, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.5668789808917197, |
|
"grad_norm": 0.6745480828446051, |
|
"learning_rate": 2.175862159002143e-06, |
|
"loss": 0.952, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.5690021231422505, |
|
"grad_norm": 0.6472133490148038, |
|
"learning_rate": 2.158448188485433e-06, |
|
"loss": 1.0431, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.5711252653927813, |
|
"grad_norm": 0.6380862328245611, |
|
"learning_rate": 2.141051088917411e-06, |
|
"loss": 0.9418, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.5732484076433121, |
|
"grad_norm": 0.6308586370084164, |
|
"learning_rate": 2.1236717196276558e-06, |
|
"loss": 0.9539, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5753715498938429, |
|
"grad_norm": 0.6260367458835687, |
|
"learning_rate": 2.106310939069956e-06, |
|
"loss": 0.9435, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.5774946921443737, |
|
"grad_norm": 0.6674553209217928, |
|
"learning_rate": 2.088969604779914e-06, |
|
"loss": 0.999, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.5796178343949044, |
|
"grad_norm": 0.6682732504211225, |
|
"learning_rate": 2.0716485733325834e-06, |
|
"loss": 0.9189, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.5817409766454352, |
|
"grad_norm": 0.7720268519149664, |
|
"learning_rate": 2.054348700300158e-06, |
|
"loss": 0.9967, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.583864118895966, |
|
"grad_norm": 0.677559579447201, |
|
"learning_rate": 2.037070840209711e-06, |
|
"loss": 1.0036, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5859872611464968, |
|
"grad_norm": 0.7180703759482054, |
|
"learning_rate": 2.019815846500988e-06, |
|
"loss": 0.978, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.5881104033970276, |
|
"grad_norm": 0.6652884434382036, |
|
"learning_rate": 2.0025845714842514e-06, |
|
"loss": 0.9471, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5902335456475584, |
|
"grad_norm": 0.6670192985366462, |
|
"learning_rate": 1.9853778662981747e-06, |
|
"loss": 0.9453, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5923566878980892, |
|
"grad_norm": 0.6816957898824048, |
|
"learning_rate": 1.968196580867808e-06, |
|
"loss": 0.9855, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.5944798301486199, |
|
"grad_norm": 0.6106556647535171, |
|
"learning_rate": 1.951041563862593e-06, |
|
"loss": 1.0012, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5966029723991507, |
|
"grad_norm": 0.6832368297554903, |
|
"learning_rate": 1.933913662654441e-06, |
|
"loss": 0.99, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5987261146496815, |
|
"grad_norm": 0.6967673396654995, |
|
"learning_rate": 1.91681372327588e-06, |
|
"loss": 0.9192, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.6008492569002123, |
|
"grad_norm": 0.6838616920307027, |
|
"learning_rate": 1.899742590378263e-06, |
|
"loss": 0.9807, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.6029723991507431, |
|
"grad_norm": 0.6901283920242337, |
|
"learning_rate": 1.8827011071900476e-06, |
|
"loss": 0.974, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.6050955414012739, |
|
"grad_norm": 0.719962073597759, |
|
"learning_rate": 1.865690115475144e-06, |
|
"loss": 0.9923, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.6072186836518046, |
|
"grad_norm": 0.694648374592989, |
|
"learning_rate": 1.8487104554913362e-06, |
|
"loss": 0.9311, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.6093418259023354, |
|
"grad_norm": 0.6149142322093952, |
|
"learning_rate": 1.8317629659487796e-06, |
|
"loss": 0.9273, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.6114649681528662, |
|
"grad_norm": 0.6673784545283598, |
|
"learning_rate": 1.8148484839685663e-06, |
|
"loss": 0.9731, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.613588110403397, |
|
"grad_norm": 0.6882749729992769, |
|
"learning_rate": 1.7979678450413845e-06, |
|
"loss": 0.9049, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.6157112526539278, |
|
"grad_norm": 0.6682848270425117, |
|
"learning_rate": 1.781121882986245e-06, |
|
"loss": 0.9646, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.6178343949044586, |
|
"grad_norm": 0.6757546622813148, |
|
"learning_rate": 1.764311429909291e-06, |
|
"loss": 0.9679, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.6199575371549894, |
|
"grad_norm": 0.6213555164128418, |
|
"learning_rate": 1.7475373161627034e-06, |
|
"loss": 0.9799, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.6220806794055201, |
|
"grad_norm": 0.6892438575484697, |
|
"learning_rate": 1.730800370303683e-06, |
|
"loss": 0.9954, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.6242038216560509, |
|
"grad_norm": 0.7655679677743098, |
|
"learning_rate": 1.7141014190535183e-06, |
|
"loss": 0.8903, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.6263269639065817, |
|
"grad_norm": 0.6688086006810142, |
|
"learning_rate": 1.6974412872567598e-06, |
|
"loss": 0.951, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.6284501061571125, |
|
"grad_norm": 0.8034311762095041, |
|
"learning_rate": 1.6808207978404685e-06, |
|
"loss": 0.991, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.6305732484076433, |
|
"grad_norm": 0.6831580011060127, |
|
"learning_rate": 1.6642407717735711e-06, |
|
"loss": 0.9735, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.6326963906581741, |
|
"grad_norm": 0.648345056667966, |
|
"learning_rate": 1.647702028026308e-06, |
|
"loss": 0.9365, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.6348195329087049, |
|
"grad_norm": 0.6142478201259879, |
|
"learning_rate": 1.6312053835297783e-06, |
|
"loss": 0.9832, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.6369426751592356, |
|
"grad_norm": 0.6787332342297232, |
|
"learning_rate": 1.6147516531355912e-06, |
|
"loss": 0.9205, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6390658174097664, |
|
"grad_norm": 0.712325405374025, |
|
"learning_rate": 1.5983416495756116e-06, |
|
"loss": 0.9973, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.6411889596602972, |
|
"grad_norm": 0.682805933407662, |
|
"learning_rate": 1.58197618342182e-06, |
|
"loss": 0.9623, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.643312101910828, |
|
"grad_norm": 0.7161626440164035, |
|
"learning_rate": 1.565656063046272e-06, |
|
"loss": 0.9512, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.6454352441613588, |
|
"grad_norm": 0.6680248951343593, |
|
"learning_rate": 1.549382094581166e-06, |
|
"loss": 0.9152, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.6475583864118896, |
|
"grad_norm": 0.6697551511382988, |
|
"learning_rate": 1.5331550818790313e-06, |
|
"loss": 0.9358, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6496815286624203, |
|
"grad_norm": 0.639520157757964, |
|
"learning_rate": 1.516975826473015e-06, |
|
"loss": 0.9663, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.6518046709129511, |
|
"grad_norm": 0.6798464046837605, |
|
"learning_rate": 1.5008451275372948e-06, |
|
"loss": 0.9648, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.6539278131634819, |
|
"grad_norm": 0.6181496284024336, |
|
"learning_rate": 1.4847637818475992e-06, |
|
"loss": 0.9688, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.6560509554140127, |
|
"grad_norm": 0.6388645164368109, |
|
"learning_rate": 1.4687325837418564e-06, |
|
"loss": 1.0343, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.6581740976645435, |
|
"grad_norm": 0.6279838621791385, |
|
"learning_rate": 1.4527523250809544e-06, |
|
"loss": 0.991, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6602972399150743, |
|
"grad_norm": 0.7150277175256885, |
|
"learning_rate": 1.4368237952096258e-06, |
|
"loss": 0.9415, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.6624203821656051, |
|
"grad_norm": 0.684047421357306, |
|
"learning_rate": 1.4209477809174634e-06, |
|
"loss": 0.9117, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.6645435244161358, |
|
"grad_norm": 0.7294054910622478, |
|
"learning_rate": 1.4051250664000515e-06, |
|
"loss": 0.9571, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.6266249724475469, |
|
"learning_rate": 1.389356433220232e-06, |
|
"loss": 0.976, |
|
"step": 314 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 471, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 157, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 46655615139840.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |