{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9968652037617556,
  "eval_steps": 500,
  "global_step": 424,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004702194357366771,
      "grad_norm": 3.308954954147339,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 0.9758,
      "step": 1
    },
    {
      "epoch": 0.009404388714733543,
      "grad_norm": 3.0945961475372314,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 0.9784,
      "step": 2
    },
    {
      "epoch": 0.014106583072100314,
      "grad_norm": 2.6266396045684814,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 0.9407,
      "step": 3
    },
    {
      "epoch": 0.018808777429467086,
      "grad_norm": 2.781346321105957,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.983,
      "step": 4
    },
    {
      "epoch": 0.023510971786833857,
      "grad_norm": 2.8360986709594727,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.941,
      "step": 5
    },
    {
      "epoch": 0.02821316614420063,
      "grad_norm": 2.814222812652588,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.9675,
      "step": 6
    },
    {
      "epoch": 0.032915360501567396,
      "grad_norm": 2.821528196334839,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 0.8856,
      "step": 7
    },
    {
      "epoch": 0.03761755485893417,
      "grad_norm": 2.7562029361724854,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.95,
      "step": 8
    },
    {
      "epoch": 0.04231974921630094,
      "grad_norm": 2.9743621349334717,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 0.9654,
      "step": 9
    },
    {
      "epoch": 0.047021943573667714,
      "grad_norm": 3.0444681644439697,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.9133,
      "step": 10
    },
    {
      "epoch": 0.05172413793103448,
      "grad_norm": 2.9696431159973145,
      "learning_rate": 5.5e-07,
      "loss": 0.9425,
      "step": 11
    },
    {
      "epoch": 0.05642633228840126,
      "grad_norm": 2.8478922843933105,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.9743,
      "step": 12
    },
    {
      "epoch": 0.061128526645768025,
      "grad_norm": 2.664818525314331,
      "learning_rate": 6.5e-07,
      "loss": 0.9276,
      "step": 13
    },
    {
      "epoch": 0.06583072100313479,
      "grad_norm": 2.727060556411743,
      "learning_rate": 7.000000000000001e-07,
      "loss": 0.9221,
      "step": 14
    },
    {
      "epoch": 0.07053291536050156,
      "grad_norm": 2.8166537284851074,
      "learning_rate": 7.5e-07,
      "loss": 0.9406,
      "step": 15
    },
    {
      "epoch": 0.07523510971786834,
      "grad_norm": 2.8412551879882812,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.901,
      "step": 16
    },
    {
      "epoch": 0.07993730407523511,
      "grad_norm": 2.6464271545410156,
      "learning_rate": 8.500000000000001e-07,
      "loss": 0.9586,
      "step": 17
    },
    {
      "epoch": 0.08463949843260188,
      "grad_norm": 2.506002187728882,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.8895,
      "step": 18
    },
    {
      "epoch": 0.08934169278996865,
      "grad_norm": 2.663196086883545,
      "learning_rate": 9.500000000000001e-07,
      "loss": 0.8833,
      "step": 19
    },
    {
      "epoch": 0.09404388714733543,
      "grad_norm": 2.5434982776641846,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.8737,
      "step": 20
    },
    {
      "epoch": 0.0987460815047022,
      "grad_norm": 2.2520358562469482,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 0.8895,
      "step": 21
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 2.3946690559387207,
      "learning_rate": 1.1e-06,
      "loss": 0.8405,
      "step": 22
    },
    {
      "epoch": 0.10815047021943573,
      "grad_norm": 2.209516763687134,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 0.8441,
      "step": 23
    },
    {
      "epoch": 0.11285266457680251,
      "grad_norm": 2.3084263801574707,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.8103,
      "step": 24
    },
    {
      "epoch": 0.11755485893416928,
      "grad_norm": 2.4108471870422363,
      "learning_rate": 1.25e-06,
      "loss": 0.808,
      "step": 25
    },
    {
      "epoch": 0.12225705329153605,
      "grad_norm": 2.116116762161255,
      "learning_rate": 1.3e-06,
      "loss": 0.7901,
      "step": 26
    },
    {
      "epoch": 0.12695924764890282,
      "grad_norm": 2.2457115650177,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 0.7737,
      "step": 27
    },
    {
      "epoch": 0.13166144200626959,
      "grad_norm": 1.9436711072921753,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.7465,
      "step": 28
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 2.1678073406219482,
      "learning_rate": 1.45e-06,
      "loss": 0.7819,
      "step": 29
    },
    {
      "epoch": 0.14106583072100312,
      "grad_norm": 1.8869282007217407,
      "learning_rate": 1.5e-06,
      "loss": 0.7653,
      "step": 30
    },
    {
      "epoch": 0.14576802507836992,
      "grad_norm": 1.6957731246948242,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 0.7659,
      "step": 31
    },
    {
      "epoch": 0.15047021943573669,
      "grad_norm": 1.4701424837112427,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.7553,
      "step": 32
    },
    {
      "epoch": 0.15517241379310345,
      "grad_norm": 1.3234986066818237,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 0.6875,
      "step": 33
    },
    {
      "epoch": 0.15987460815047022,
      "grad_norm": 1.3325831890106201,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.6681,
      "step": 34
    },
    {
      "epoch": 0.164576802507837,
      "grad_norm": 1.3700138330459595,
      "learning_rate": 1.75e-06,
      "loss": 0.7635,
      "step": 35
    },
    {
      "epoch": 0.16927899686520376,
      "grad_norm": 1.2316628694534302,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.6776,
      "step": 36
    },
    {
      "epoch": 0.17398119122257052,
      "grad_norm": 1.170616865158081,
      "learning_rate": 1.85e-06,
      "loss": 0.7073,
      "step": 37
    },
    {
      "epoch": 0.1786833855799373,
      "grad_norm": 1.1797659397125244,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.7572,
      "step": 38
    },
    {
      "epoch": 0.1833855799373041,
      "grad_norm": 1.0123047828674316,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 0.6798,
      "step": 39
    },
    {
      "epoch": 0.18808777429467086,
      "grad_norm": 0.975169837474823,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.7088,
      "step": 40
    },
    {
      "epoch": 0.19278996865203762,
      "grad_norm": 0.891377866268158,
      "learning_rate": 2.05e-06,
      "loss": 0.6696,
      "step": 41
    },
    {
      "epoch": 0.1974921630094044,
      "grad_norm": 0.8997182250022888,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.7135,
      "step": 42
    },
    {
      "epoch": 0.20219435736677116,
      "grad_norm": 0.9378617405891418,
      "learning_rate": 2.15e-06,
      "loss": 0.6827,
      "step": 43
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 0.8060838580131531,
      "learning_rate": 2.2e-06,
      "loss": 0.6704,
      "step": 44
    },
    {
      "epoch": 0.2115987460815047,
      "grad_norm": 0.7708892822265625,
      "learning_rate": 2.25e-06,
      "loss": 0.6499,
      "step": 45
    },
    {
      "epoch": 0.21630094043887146,
      "grad_norm": 0.7236770987510681,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.657,
      "step": 46
    },
    {
      "epoch": 0.22100313479623823,
      "grad_norm": 0.6666117906570435,
      "learning_rate": 2.35e-06,
      "loss": 0.634,
      "step": 47
    },
    {
      "epoch": 0.22570532915360503,
      "grad_norm": 0.6475918292999268,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.6237,
      "step": 48
    },
    {
      "epoch": 0.2304075235109718,
      "grad_norm": 0.7277692556381226,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.6707,
      "step": 49
    },
    {
      "epoch": 0.23510971786833856,
      "grad_norm": 0.6572731733322144,
      "learning_rate": 2.5e-06,
      "loss": 0.6812,
      "step": 50
    },
    {
      "epoch": 0.23981191222570533,
      "grad_norm": 0.6716486811637878,
      "learning_rate": 2.55e-06,
      "loss": 0.6803,
      "step": 51
    },
    {
      "epoch": 0.2445141065830721,
      "grad_norm": 0.6065218448638916,
      "learning_rate": 2.6e-06,
      "loss": 0.6353,
      "step": 52
    },
    {
      "epoch": 0.24921630094043887,
      "grad_norm": 0.6749820709228516,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.6393,
      "step": 53
    },
    {
      "epoch": 0.25391849529780564,
      "grad_norm": 0.6187207102775574,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.6318,
      "step": 54
    },
    {
      "epoch": 0.25862068965517243,
      "grad_norm": 0.6700596809387207,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.6762,
      "step": 55
    },
    {
      "epoch": 0.26332288401253917,
      "grad_norm": 0.6460264921188354,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.6388,
      "step": 56
    },
    {
      "epoch": 0.26802507836990597,
      "grad_norm": 0.7263085842132568,
      "learning_rate": 2.85e-06,
      "loss": 0.6413,
      "step": 57
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.674784779548645,
      "learning_rate": 2.9e-06,
      "loss": 0.6134,
      "step": 58
    },
    {
      "epoch": 0.2774294670846395,
      "grad_norm": 0.6552851796150208,
      "learning_rate": 2.95e-06,
      "loss": 0.6508,
      "step": 59
    },
    {
      "epoch": 0.28213166144200624,
      "grad_norm": 0.7160854339599609,
      "learning_rate": 3e-06,
      "loss": 0.677,
      "step": 60
    },
    {
      "epoch": 0.28683385579937304,
      "grad_norm": 0.682802677154541,
      "learning_rate": 3.05e-06,
      "loss": 0.5983,
      "step": 61
    },
    {
      "epoch": 0.29153605015673983,
      "grad_norm": 0.5360945463180542,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.5928,
      "step": 62
    },
    {
      "epoch": 0.2962382445141066,
      "grad_norm": 0.6875145435333252,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.6366,
      "step": 63
    },
    {
      "epoch": 0.30094043887147337,
      "grad_norm": 0.5564191937446594,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.6466,
      "step": 64
    },
    {
      "epoch": 0.3056426332288401,
      "grad_norm": 0.8959600925445557,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.5875,
      "step": 65
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 0.5513110160827637,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.6042,
      "step": 66
    },
    {
      "epoch": 0.31504702194357365,
      "grad_norm": 0.5638250112533569,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.6343,
      "step": 67
    },
    {
      "epoch": 0.31974921630094044,
      "grad_norm": 0.5723473429679871,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.6007,
      "step": 68
    },
    {
      "epoch": 0.32445141065830724,
      "grad_norm": 0.5193734765052795,
      "learning_rate": 3.45e-06,
      "loss": 0.5877,
      "step": 69
    },
    {
      "epoch": 0.329153605015674,
      "grad_norm": 0.7437443733215332,
      "learning_rate": 3.5e-06,
      "loss": 0.6579,
      "step": 70
    },
    {
      "epoch": 0.3338557993730408,
      "grad_norm": 0.5810403823852539,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.625,
      "step": 71
    },
    {
      "epoch": 0.3385579937304075,
      "grad_norm": 0.5594515204429626,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.6039,
      "step": 72
    },
    {
      "epoch": 0.3432601880877743,
      "grad_norm": 0.5781659483909607,
      "learning_rate": 3.65e-06,
      "loss": 0.5936,
      "step": 73
    },
    {
      "epoch": 0.34796238244514105,
      "grad_norm": 0.5736168622970581,
      "learning_rate": 3.7e-06,
      "loss": 0.61,
      "step": 74
    },
    {
      "epoch": 0.35266457680250785,
      "grad_norm": 0.5082181096076965,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.6031,
      "step": 75
    },
    {
      "epoch": 0.3573667711598746,
      "grad_norm": 0.5188243985176086,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.6048,
      "step": 76
    },
    {
      "epoch": 0.3620689655172414,
      "grad_norm": 0.678931713104248,
      "learning_rate": 3.85e-06,
      "loss": 0.615,
      "step": 77
    },
    {
      "epoch": 0.3667711598746082,
      "grad_norm": 0.5580578446388245,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.5622,
      "step": 78
    },
    {
      "epoch": 0.3714733542319749,
      "grad_norm": 0.5222985744476318,
      "learning_rate": 3.95e-06,
      "loss": 0.6233,
      "step": 79
    },
    {
      "epoch": 0.3761755485893417,
      "grad_norm": 0.6105766892433167,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.5448,
      "step": 80
    },
    {
      "epoch": 0.38087774294670845,
      "grad_norm": 0.4837906062602997,
      "learning_rate": 4.05e-06,
      "loss": 0.6058,
      "step": 81
    },
    {
      "epoch": 0.38557993730407525,
      "grad_norm": 0.5260081887245178,
      "learning_rate": 4.1e-06,
      "loss": 0.5753,
      "step": 82
    },
    {
      "epoch": 0.390282131661442,
      "grad_norm": 0.5424107909202576,
      "learning_rate": 4.15e-06,
      "loss": 0.5836,
      "step": 83
    },
    {
      "epoch": 0.3949843260188088,
      "grad_norm": 0.5307353734970093,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.5891,
      "step": 84
    },
    {
      "epoch": 0.3996865203761755,
      "grad_norm": 0.5606550574302673,
      "learning_rate": 4.25e-06,
      "loss": 0.6097,
      "step": 85
    },
    {
      "epoch": 0.4043887147335423,
      "grad_norm": 0.5380442142486572,
      "learning_rate": 4.3e-06,
      "loss": 0.6161,
      "step": 86
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 0.7110352516174316,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.5681,
      "step": 87
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 0.525680422782898,
      "learning_rate": 4.4e-06,
      "loss": 0.5942,
      "step": 88
    },
    {
      "epoch": 0.41849529780564265,
      "grad_norm": 0.5567077398300171,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.5929,
      "step": 89
    },
    {
      "epoch": 0.4231974921630094,
      "grad_norm": 0.5358735918998718,
      "learning_rate": 4.5e-06,
      "loss": 0.5717,
      "step": 90
    },
    {
      "epoch": 0.4278996865203762,
      "grad_norm": 0.48081454634666443,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.5984,
      "step": 91
    },
    {
      "epoch": 0.43260188087774293,
      "grad_norm": 0.4900401830673218,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.6017,
      "step": 92
    },
    {
      "epoch": 0.4373040752351097,
      "grad_norm": 0.48369699716567993,
      "learning_rate": 4.65e-06,
      "loss": 0.5295,
      "step": 93
    },
    {
      "epoch": 0.44200626959247646,
      "grad_norm": 0.4992128312587738,
      "learning_rate": 4.7e-06,
      "loss": 0.5495,
      "step": 94
    },
    {
      "epoch": 0.44670846394984326,
      "grad_norm": 0.5139058828353882,
      "learning_rate": 4.75e-06,
      "loss": 0.5528,
      "step": 95
    },
    {
      "epoch": 0.45141065830721006,
      "grad_norm": 0.485097199678421,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.5528,
      "step": 96
    },
    {
      "epoch": 0.4561128526645768,
      "grad_norm": 0.5944838523864746,
      "learning_rate": 4.85e-06,
      "loss": 0.5577,
      "step": 97
    },
    {
      "epoch": 0.4608150470219436,
      "grad_norm": 0.5313893556594849,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.5744,
      "step": 98
    },
    {
      "epoch": 0.46551724137931033,
      "grad_norm": 0.49062076210975647,
      "learning_rate": 4.95e-06,
      "loss": 0.5563,
      "step": 99
    },
    {
      "epoch": 0.4702194357366771,
      "grad_norm": 0.5196151733398438,
      "learning_rate": 5e-06,
      "loss": 0.5817,
      "step": 100
    },
    {
      "epoch": 0.47492163009404387,
      "grad_norm": 0.507022500038147,
      "learning_rate": 4.9999910183883085e-06,
      "loss": 0.5608,
      "step": 101
    },
    {
      "epoch": 0.47962382445141066,
      "grad_norm": 0.494018018245697,
      "learning_rate": 4.999964073617768e-06,
      "loss": 0.57,
      "step": 102
    },
    {
      "epoch": 0.4843260188087774,
      "grad_norm": 0.48901236057281494,
      "learning_rate": 4.999919165881985e-06,
      "loss": 0.5568,
      "step": 103
    },
    {
      "epoch": 0.4890282131661442,
      "grad_norm": 0.5166047811508179,
      "learning_rate": 4.999856295503635e-06,
      "loss": 0.5676,
      "step": 104
    },
    {
      "epoch": 0.493730407523511,
      "grad_norm": 0.4952854812145233,
      "learning_rate": 4.9997754629344596e-06,
      "loss": 0.5454,
      "step": 105
    },
    {
      "epoch": 0.49843260188087773,
      "grad_norm": 0.48603224754333496,
      "learning_rate": 4.999676668755263e-06,
      "loss": 0.5351,
      "step": 106
    },
    {
      "epoch": 0.5031347962382445,
      "grad_norm": 0.4922390878200531,
      "learning_rate": 4.999559913675912e-06,
      "loss": 0.5672,
      "step": 107
    },
    {
      "epoch": 0.5078369905956113,
      "grad_norm": 0.4846023619174957,
      "learning_rate": 4.999425198535325e-06,
      "loss": 0.5472,
      "step": 108
    },
    {
      "epoch": 0.512539184952978,
      "grad_norm": 0.4974515736103058,
      "learning_rate": 4.999272524301469e-06,
      "loss": 0.5507,
      "step": 109
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 0.5394945740699768,
      "learning_rate": 4.9991018920713505e-06,
      "loss": 0.5667,
      "step": 110
    },
    {
      "epoch": 0.5219435736677116,
      "grad_norm": 0.5393733978271484,
      "learning_rate": 4.9989133030710154e-06,
      "loss": 0.5505,
      "step": 111
    },
    {
      "epoch": 0.5266457680250783,
      "grad_norm": 0.7595535516738892,
      "learning_rate": 4.9987067586555275e-06,
      "loss": 0.5484,
      "step": 112
    },
    {
      "epoch": 0.5313479623824452,
      "grad_norm": 0.5122578144073486,
      "learning_rate": 4.998482260308969e-06,
      "loss": 0.5669,
      "step": 113
    },
    {
      "epoch": 0.5360501567398119,
      "grad_norm": 0.5233834981918335,
      "learning_rate": 4.998239809644427e-06,
      "loss": 0.5613,
      "step": 114
    },
    {
      "epoch": 0.5407523510971787,
      "grad_norm": 3.08868408203125,
      "learning_rate": 4.9979794084039755e-06,
      "loss": 0.5746,
      "step": 115
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.49716249108314514,
      "learning_rate": 4.997701058458677e-06,
      "loss": 0.5349,
      "step": 116
    },
    {
      "epoch": 0.5501567398119123,
      "grad_norm": 0.5552968978881836,
      "learning_rate": 4.997404761808554e-06,
      "loss": 0.569,
      "step": 117
    },
    {
      "epoch": 0.554858934169279,
      "grad_norm": 0.5029860734939575,
      "learning_rate": 4.9970905205825845e-06,
      "loss": 0.5755,
      "step": 118
    },
    {
      "epoch": 0.5595611285266457,
      "grad_norm": 0.5006077885627747,
      "learning_rate": 4.996758337038683e-06,
      "loss": 0.5654,
      "step": 119
    },
    {
      "epoch": 0.5642633228840125,
      "grad_norm": 0.4900127053260803,
      "learning_rate": 4.996408213563684e-06,
      "loss": 0.5575,
      "step": 120
    },
    {
      "epoch": 0.5689655172413793,
      "grad_norm": 0.5142834186553955,
      "learning_rate": 4.996040152673326e-06,
      "loss": 0.5247,
      "step": 121
    },
    {
      "epoch": 0.5736677115987461,
      "grad_norm": 0.5097452998161316,
      "learning_rate": 4.995654157012233e-06,
      "loss": 0.538,
      "step": 122
    },
    {
      "epoch": 0.5783699059561128,
      "grad_norm": 0.5581697225570679,
      "learning_rate": 4.995250229353895e-06,
      "loss": 0.5324,
      "step": 123
    },
    {
      "epoch": 0.5830721003134797,
      "grad_norm": 0.5943841934204102,
      "learning_rate": 4.99482837260065e-06,
      "loss": 0.547,
      "step": 124
    },
    {
      "epoch": 0.5877742946708464,
      "grad_norm": 0.541631281375885,
      "learning_rate": 4.99438858978366e-06,
      "loss": 0.577,
      "step": 125
    },
    {
      "epoch": 0.5924764890282131,
      "grad_norm": 0.6008974313735962,
      "learning_rate": 4.993930884062892e-06,
      "loss": 0.5579,
      "step": 126
    },
    {
      "epoch": 0.5971786833855799,
      "grad_norm": 0.7009158730506897,
      "learning_rate": 4.993455258727094e-06,
      "loss": 0.5579,
      "step": 127
    },
    {
      "epoch": 0.6018808777429467,
      "grad_norm": 0.5942498445510864,
      "learning_rate": 4.992961717193773e-06,
      "loss": 0.5599,
      "step": 128
    },
    {
      "epoch": 0.6065830721003135,
      "grad_norm": 0.4936768710613251,
      "learning_rate": 4.9924502630091655e-06,
      "loss": 0.5676,
      "step": 129
    },
    {
      "epoch": 0.6112852664576802,
      "grad_norm": 0.5035172700881958,
      "learning_rate": 4.99192089984822e-06,
      "loss": 0.5519,
      "step": 130
    },
    {
      "epoch": 0.6159874608150471,
      "grad_norm": 0.8178919553756714,
      "learning_rate": 4.9913736315145614e-06,
      "loss": 0.5594,
      "step": 131
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 0.4977147877216339,
      "learning_rate": 4.990808461940474e-06,
      "loss": 0.54,
      "step": 132
    },
    {
      "epoch": 0.6253918495297806,
      "grad_norm": 0.7395517826080322,
      "learning_rate": 4.990225395186862e-06,
      "loss": 0.5467,
      "step": 133
    },
    {
      "epoch": 0.6300940438871473,
      "grad_norm": 1.1424250602722168,
      "learning_rate": 4.9896244354432314e-06,
      "loss": 0.5432,
      "step": 134
    },
    {
      "epoch": 0.6347962382445141,
      "grad_norm": 0.5002970695495605,
      "learning_rate": 4.98900558702765e-06,
      "loss": 0.5526,
      "step": 135
    },
    {
      "epoch": 0.6394984326018809,
      "grad_norm": 0.49464455246925354,
      "learning_rate": 4.9883688543867225e-06,
      "loss": 0.563,
      "step": 136
    },
    {
      "epoch": 0.6442006269592476,
      "grad_norm": 0.48660922050476074,
      "learning_rate": 4.987714242095558e-06,
      "loss": 0.5328,
      "step": 137
    },
    {
      "epoch": 0.6489028213166145,
      "grad_norm": 0.5086668133735657,
      "learning_rate": 4.9870417548577355e-06,
      "loss": 0.5373,
      "step": 138
    },
    {
      "epoch": 0.6536050156739812,
      "grad_norm": 0.4638510048389435,
      "learning_rate": 4.9863513975052696e-06,
      "loss": 0.5459,
      "step": 139
    },
    {
      "epoch": 0.658307210031348,
      "grad_norm": 0.5742124915122986,
      "learning_rate": 4.985643174998578e-06,
      "loss": 0.5681,
      "step": 140
    },
    {
      "epoch": 0.6630094043887147,
      "grad_norm": 0.5262806415557861,
      "learning_rate": 4.984917092426445e-06,
      "loss": 0.5549,
      "step": 141
    },
    {
      "epoch": 0.6677115987460815,
      "grad_norm": 0.4765531122684479,
      "learning_rate": 4.984173155005982e-06,
      "loss": 0.5435,
      "step": 142
    },
    {
      "epoch": 0.6724137931034483,
      "grad_norm": 0.4731754660606384,
      "learning_rate": 4.983411368082597e-06,
      "loss": 0.5484,
      "step": 143
    },
    {
      "epoch": 0.677115987460815,
      "grad_norm": 0.5015071630477905,
      "learning_rate": 4.982631737129948e-06,
      "loss": 0.5314,
      "step": 144
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.5013841986656189,
      "learning_rate": 4.98183426774991e-06,
      "loss": 0.5705,
      "step": 145
    },
    {
      "epoch": 0.6865203761755486,
      "grad_norm": 0.5398444533348083,
      "learning_rate": 4.981018965672529e-06,
      "loss": 0.5713,
      "step": 146
    },
    {
      "epoch": 0.6912225705329154,
      "grad_norm": 0.5247324109077454,
      "learning_rate": 4.98018583675599e-06,
      "loss": 0.5512,
      "step": 147
    },
    {
      "epoch": 0.6959247648902821,
      "grad_norm": 0.488935649394989,
      "learning_rate": 4.979334886986562e-06,
      "loss": 0.5532,
      "step": 148
    },
    {
      "epoch": 0.700626959247649,
      "grad_norm": 0.5056779384613037,
      "learning_rate": 4.978466122478567e-06,
      "loss": 0.5668,
      "step": 149
    },
    {
      "epoch": 0.7053291536050157,
      "grad_norm": 0.5102982521057129,
      "learning_rate": 4.97757954947433e-06,
      "loss": 0.5346,
      "step": 150
    },
    {
      "epoch": 0.7100313479623824,
      "grad_norm": 0.4989202320575714,
      "learning_rate": 4.976675174344132e-06,
      "loss": 0.5488,
      "step": 151
    },
    {
      "epoch": 0.7147335423197492,
      "grad_norm": 0.4996865689754486,
      "learning_rate": 4.975753003586172e-06,
      "loss": 0.528,
      "step": 152
    },
    {
      "epoch": 0.719435736677116,
      "grad_norm": 0.5011430978775024,
      "learning_rate": 4.974813043826513e-06,
      "loss": 0.5221,
      "step": 153
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 0.530082643032074,
      "learning_rate": 4.973855301819039e-06,
      "loss": 0.5398,
      "step": 154
    },
    {
      "epoch": 0.7288401253918495,
      "grad_norm": 0.5243843197822571,
      "learning_rate": 4.972879784445402e-06,
      "loss": 0.5215,
      "step": 155
    },
    {
      "epoch": 0.7335423197492164,
      "grad_norm": 0.5321754217147827,
      "learning_rate": 4.971886498714978e-06,
      "loss": 0.5432,
      "step": 156
    },
    {
      "epoch": 0.7382445141065831,
      "grad_norm": 0.5054258704185486,
      "learning_rate": 4.97087545176481e-06,
      "loss": 0.5197,
      "step": 157
    },
    {
      "epoch": 0.7429467084639498,
      "grad_norm": 0.5209529995918274,
      "learning_rate": 4.9698466508595655e-06,
      "loss": 0.5486,
      "step": 158
    },
    {
      "epoch": 0.7476489028213166,
      "grad_norm": 0.47057369351387024,
      "learning_rate": 4.9688001033914756e-06,
      "loss": 0.5358,
      "step": 159
    },
    {
      "epoch": 0.7523510971786834,
      "grad_norm": 0.5286029577255249,
      "learning_rate": 4.967735816880286e-06,
      "loss": 0.5471,
      "step": 160
    },
    {
      "epoch": 0.7570532915360502,
      "grad_norm": 0.7403479218482971,
      "learning_rate": 4.966653798973205e-06,
      "loss": 0.5264,
      "step": 161
    },
    {
      "epoch": 0.7617554858934169,
      "grad_norm": 0.5945819020271301,
      "learning_rate": 4.965554057444842e-06,
      "loss": 0.5194,
      "step": 162
    },
    {
      "epoch": 0.7664576802507836,
      "grad_norm": 0.48147913813591003,
      "learning_rate": 4.964436600197161e-06,
      "loss": 0.5425,
      "step": 163
    },
    {
      "epoch": 0.7711598746081505,
      "grad_norm": 0.5892655849456787,
      "learning_rate": 4.963301435259413e-06,
      "loss": 0.5134,
      "step": 164
    },
    {
      "epoch": 0.7758620689655172,
      "grad_norm": 0.546521782875061,
      "learning_rate": 4.962148570788088e-06,
      "loss": 0.5334,
      "step": 165
    },
    {
      "epoch": 0.780564263322884,
      "grad_norm": 0.5432559251785278,
      "learning_rate": 4.96097801506685e-06,
      "loss": 0.5214,
      "step": 166
    },
    {
      "epoch": 0.7852664576802508,
      "grad_norm": 0.4892060458660126,
      "learning_rate": 4.959789776506482e-06,
      "loss": 0.5307,
      "step": 167
    },
    {
      "epoch": 0.7899686520376176,
      "grad_norm": 0.5296177268028259,
      "learning_rate": 4.958583863644821e-06,
      "loss": 0.5539,
      "step": 168
    },
    {
      "epoch": 0.7946708463949843,
      "grad_norm": 0.4970422685146332,
      "learning_rate": 4.9573602851466985e-06,
      "loss": 0.5193,
      "step": 169
    },
    {
      "epoch": 0.799373040752351,
      "grad_norm": 0.5201449990272522,
      "learning_rate": 4.9561190498038815e-06,
      "loss": 0.542,
      "step": 170
    },
    {
      "epoch": 0.8040752351097179,
      "grad_norm": 0.6067378520965576,
      "learning_rate": 4.954860166535005e-06,
      "loss": 0.5319,
      "step": 171
    },
    {
      "epoch": 0.8087774294670846,
      "grad_norm": 0.4754677712917328,
      "learning_rate": 4.95358364438551e-06,
      "loss": 0.5366,
      "step": 172
    },
    {
      "epoch": 0.8134796238244514,
      "grad_norm": 0.4970039129257202,
      "learning_rate": 4.952289492527576e-06,
      "loss": 0.5626,
      "step": 173
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.5602061748504639,
      "learning_rate": 4.9509777202600605e-06,
      "loss": 0.4951,
      "step": 174
    },
    {
      "epoch": 0.822884012539185,
      "grad_norm": 0.5069930553436279,
      "learning_rate": 4.949648337008425e-06,
      "loss": 0.5419,
      "step": 175
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 2.1186537742614746,
      "learning_rate": 4.948301352324674e-06,
      "loss": 0.5361,
      "step": 176
    },
    {
      "epoch": 0.8322884012539185,
      "grad_norm": 0.4984792470932007,
      "learning_rate": 4.946936775887281e-06,
      "loss": 0.5315,
      "step": 177
    },
    {
      "epoch": 0.8369905956112853,
      "grad_norm": 0.49590349197387695,
      "learning_rate": 4.945554617501124e-06,
      "loss": 0.5258,
      "step": 178
    },
    {
      "epoch": 0.841692789968652,
      "grad_norm": 0.48108625411987305,
      "learning_rate": 4.944154887097411e-06,
      "loss": 0.5566,
      "step": 179
    },
    {
      "epoch": 0.8463949843260188,
      "grad_norm": 0.5167516469955444,
      "learning_rate": 4.942737594733608e-06,
      "loss": 0.5285,
      "step": 180
    },
    {
      "epoch": 0.8510971786833855,
      "grad_norm": 0.4649943709373474,
      "learning_rate": 4.941302750593373e-06,
      "loss": 0.5443,
      "step": 181
    },
    {
      "epoch": 0.8557993730407524,
      "grad_norm": 0.5184140205383301,
      "learning_rate": 4.939850364986475e-06,
      "loss": 0.4859,
      "step": 182
    },
    {
      "epoch": 0.8605015673981191,
      "grad_norm": 0.6746440529823303,
      "learning_rate": 4.938380448348725e-06,
      "loss": 0.4947,
      "step": 183
    },
    {
      "epoch": 0.8652037617554859,
      "grad_norm": 0.5150278806686401,
      "learning_rate": 4.9368930112419e-06,
      "loss": 0.5351,
      "step": 184
    },
    {
      "epoch": 0.8699059561128527,
      "grad_norm": 0.47614121437072754,
      "learning_rate": 4.935388064353665e-06,
      "loss": 0.5362,
      "step": 185
    },
    {
      "epoch": 0.8746081504702194,
      "grad_norm": 0.5044179558753967,
      "learning_rate": 4.9338656184975e-06,
      "loss": 0.532,
      "step": 186
    },
    {
      "epoch": 0.8793103448275862,
      "grad_norm": 0.4887774884700775,
      "learning_rate": 4.932325684612618e-06,
      "loss": 0.5448,
      "step": 187
    },
    {
      "epoch": 0.8840125391849529,
      "grad_norm": 0.49574583768844604,
      "learning_rate": 4.93076827376389e-06,
      "loss": 0.5485,
      "step": 188
    },
    {
      "epoch": 0.8887147335423198,
      "grad_norm": 0.5294933915138245,
      "learning_rate": 4.9291933971417635e-06,
      "loss": 0.5441,
      "step": 189
    },
    {
      "epoch": 0.8934169278996865,
      "grad_norm": 0.4594694972038269,
      "learning_rate": 4.9276010660621835e-06,
      "loss": 0.5322,
      "step": 190
    },
    {
      "epoch": 0.8981191222570533,
      "grad_norm": 0.47507232427597046,
      "learning_rate": 4.925991291966508e-06,
      "loss": 0.5103,
      "step": 191
    },
    {
      "epoch": 0.9028213166144201,
      "grad_norm": 0.4808727204799652,
      "learning_rate": 4.92436408642143e-06,
      "loss": 0.5479,
      "step": 192
    },
    {
      "epoch": 0.9075235109717869,
      "grad_norm": 0.481921523809433,
      "learning_rate": 4.9227194611188934e-06,
      "loss": 0.5242,
      "step": 193
    },
    {
      "epoch": 0.9122257053291536,
      "grad_norm": 0.6387396454811096,
      "learning_rate": 4.921057427876007e-06,
      "loss": 0.4937,
      "step": 194
    },
    {
      "epoch": 0.9169278996865203,
      "grad_norm": 0.5185182094573975,
      "learning_rate": 4.919377998634959e-06,
      "loss": 0.5515,
      "step": 195
    },
    {
      "epoch": 0.9216300940438872,
      "grad_norm": 0.5029557943344116,
      "learning_rate": 4.917681185462934e-06,
      "loss": 0.5371,
      "step": 196
    },
    {
      "epoch": 0.9263322884012539,
      "grad_norm": 0.5311322808265686,
      "learning_rate": 4.915967000552028e-06,
      "loss": 0.5275,
      "step": 197
    },
    {
      "epoch": 0.9310344827586207,
      "grad_norm": 0.46653756499290466,
      "learning_rate": 4.914235456219154e-06,
      "loss": 0.5208,
      "step": 198
    },
    {
      "epoch": 0.9357366771159875,
      "grad_norm": 0.4970494508743286,
      "learning_rate": 4.912486564905959e-06,
      "loss": 0.549,
      "step": 199
    },
    {
      "epoch": 0.9404388714733543,
      "grad_norm": 0.5315521359443665,
      "learning_rate": 4.910720339178735e-06,
      "loss": 0.5314,
      "step": 200
    },
    {
      "epoch": 0.945141065830721,
      "grad_norm": 0.5144534707069397,
      "learning_rate": 4.908936791728323e-06,
      "loss": 0.5372,
      "step": 201
    },
    {
      "epoch": 0.9498432601880877,
      "grad_norm": 0.6076200008392334,
      "learning_rate": 4.907135935370027e-06,
      "loss": 0.5353,
      "step": 202
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 0.5259833931922913,
      "learning_rate": 4.905317783043523e-06,
      "loss": 0.5442,
      "step": 203
    },
    {
      "epoch": 0.9592476489028213,
      "grad_norm": 0.4931281507015228,
      "learning_rate": 4.9034823478127605e-06,
      "loss": 0.5246,
      "step": 204
    },
    {
      "epoch": 0.9639498432601881,
      "grad_norm": 0.4909270405769348,
      "learning_rate": 4.901629642865872e-06,
      "loss": 0.5011,
      "step": 205
    },
    {
      "epoch": 0.9686520376175548,
      "grad_norm": 0.581427276134491,
      "learning_rate": 4.89975968151508e-06,
      "loss": 0.5253,
      "step": 206
    },
    {
      "epoch": 0.9733542319749217,
      "grad_norm": 0.5055932998657227,
      "learning_rate": 4.8978724771965965e-06,
      "loss": 0.5152,
      "step": 207
    },
    {
      "epoch": 0.9780564263322884,
      "grad_norm": 0.5337440371513367,
      "learning_rate": 4.895968043470532e-06,
      "loss": 0.5321,
      "step": 208
    },
    {
      "epoch": 0.9827586206896551,
      "grad_norm": 0.5171258449554443,
      "learning_rate": 4.894046394020794e-06,
      "loss": 0.4973,
      "step": 209
    },
    {
      "epoch": 0.987460815047022,
      "grad_norm": 0.4880523085594177,
      "learning_rate": 4.892107542654988e-06,
      "loss": 0.529,
      "step": 210
    },
    {
      "epoch": 0.9921630094043887,
      "grad_norm": 0.4594219923019409,
      "learning_rate": 4.890151503304325e-06,
      "loss": 0.5502,
      "step": 211
    },
    {
      "epoch": 0.9968652037617555,
      "grad_norm": 0.5071013569831848,
      "learning_rate": 4.88817829002351e-06,
      "loss": 0.5258,
      "step": 212
    },
    {
      "epoch": 1.0047021943573669,
      "grad_norm": 0.953562319278717,
      "learning_rate": 4.886187916990653e-06,
      "loss": 1.0585,
      "step": 213
    },
    {
      "epoch": 1.0094043887147335,
      "grad_norm": 0.46260955929756165,
      "learning_rate": 4.884180398507163e-06,
      "loss": 0.5036,
      "step": 214
    },
    {
      "epoch": 1.0141065830721003,
      "grad_norm": 0.46361738443374634,
      "learning_rate": 4.882155748997636e-06,
      "loss": 0.4975,
      "step": 215
    },
    {
      "epoch": 1.0188087774294672,
      "grad_norm": 0.47986194491386414,
      "learning_rate": 4.8801139830097685e-06,
      "loss": 0.5039,
      "step": 216
    },
    {
      "epoch": 1.0235109717868338,
      "grad_norm": 0.49743711948394775,
      "learning_rate": 4.878055115214238e-06,
      "loss": 0.5139,
      "step": 217
    },
    {
      "epoch": 1.0282131661442007,
      "grad_norm": 0.5503370761871338,
      "learning_rate": 4.875979160404607e-06,
      "loss": 0.5117,
      "step": 218
    },
    {
      "epoch": 1.0329153605015673,
      "grad_norm": 0.47498467564582825,
      "learning_rate": 4.873886133497209e-06,
      "loss": 0.5195,
      "step": 219
    },
    {
      "epoch": 1.0376175548589341,
      "grad_norm": 0.4586140513420105,
      "learning_rate": 4.87177604953105e-06,
      "loss": 0.5168,
      "step": 220
    },
    {
      "epoch": 1.042319749216301,
      "grad_norm": 0.5042305588722229,
      "learning_rate": 4.869648923667694e-06,
      "loss": 0.4693,
      "step": 221
    },
    {
      "epoch": 1.0470219435736676,
      "grad_norm": 0.6340722441673279,
      "learning_rate": 4.867504771191154e-06,
      "loss": 0.4945,
      "step": 222
    },
    {
      "epoch": 1.0517241379310345,
      "grad_norm": 0.4741908311843872,
      "learning_rate": 4.865343607507788e-06,
      "loss": 0.5027,
      "step": 223
    },
    {
      "epoch": 1.0564263322884013,
      "grad_norm": 0.6638283133506775,
      "learning_rate": 4.86316544814618e-06,
      "loss": 0.5191,
      "step": 224
    },
    {
      "epoch": 1.061128526645768,
      "grad_norm": 0.7720419764518738,
      "learning_rate": 4.860970308757038e-06,
      "loss": 0.4634,
      "step": 225
    },
    {
      "epoch": 1.0658307210031348,
      "grad_norm": 0.5258729457855225,
      "learning_rate": 4.858758205113072e-06,
      "loss": 0.4924,
      "step": 226
    },
    {
      "epoch": 1.0705329153605017,
      "grad_norm": 0.5001716017723083,
      "learning_rate": 4.856529153108888e-06,
      "loss": 0.5241,
      "step": 227
    },
    {
      "epoch": 1.0752351097178683,
      "grad_norm": 0.5368038415908813,
      "learning_rate": 4.854283168760868e-06,
      "loss": 0.5031,
      "step": 228
    },
    {
      "epoch": 1.0799373040752351,
      "grad_norm": 0.5357080101966858,
      "learning_rate": 4.85202026820706e-06,
      "loss": 0.5003,
      "step": 229
    },
    {
      "epoch": 1.084639498432602,
      "grad_norm": 0.5052481293678284,
      "learning_rate": 4.84974046770706e-06,
      "loss": 0.5375,
      "step": 230
    },
    {
      "epoch": 1.0893416927899686,
      "grad_norm": 0.5544849634170532,
      "learning_rate": 4.847443783641893e-06,
      "loss": 0.4472,
      "step": 231
    },
    {
      "epoch": 1.0940438871473355,
      "grad_norm": 0.49668335914611816,
      "learning_rate": 4.845130232513901e-06,
      "loss": 0.4934,
      "step": 232
    },
    {
      "epoch": 1.098746081504702,
      "grad_norm": 0.5318028330802917,
      "learning_rate": 4.842799830946615e-06,
      "loss": 0.4903,
      "step": 233
    },
    {
      "epoch": 1.103448275862069,
      "grad_norm": 0.48978278040885925,
      "learning_rate": 4.840452595684646e-06,
      "loss": 0.4864,
      "step": 234
    },
    {
      "epoch": 1.1081504702194358,
      "grad_norm": 0.47105321288108826,
      "learning_rate": 4.83808854359356e-06,
      "loss": 0.4805,
      "step": 235
    },
    {
      "epoch": 1.1128526645768024,
      "grad_norm": 0.6040552854537964,
      "learning_rate": 4.835707691659753e-06,
      "loss": 0.4839,
      "step": 236
    },
    {
      "epoch": 1.1175548589341693,
      "grad_norm": 0.501312255859375,
      "learning_rate": 4.8333100569903365e-06,
      "loss": 0.495,
      "step": 237
    },
    {
      "epoch": 1.1222570532915361,
      "grad_norm": 0.48165130615234375,
      "learning_rate": 4.8308956568130094e-06,
      "loss": 0.5144,
      "step": 238
    },
    {
      "epoch": 1.1269592476489028,
      "grad_norm": 0.5097485780715942,
      "learning_rate": 4.828464508475934e-06,
      "loss": 0.5081,
      "step": 239
    },
    {
      "epoch": 1.1316614420062696,
      "grad_norm": 0.49503275752067566,
      "learning_rate": 4.826016629447616e-06,
      "loss": 0.5103,
      "step": 240
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 0.5472146272659302,
      "learning_rate": 4.823552037316775e-06,
      "loss": 0.4857,
      "step": 241
    },
    {
      "epoch": 1.141065830721003,
      "grad_norm": 0.5358877182006836,
      "learning_rate": 4.821070749792218e-06,
      "loss": 0.5418,
      "step": 242
    },
    {
      "epoch": 1.14576802507837,
      "grad_norm": 0.4832223951816559,
      "learning_rate": 4.818572784702713e-06,
      "loss": 0.5121,
      "step": 243
    },
    {
      "epoch": 1.1504702194357366,
      "grad_norm": 0.928979218006134,
      "learning_rate": 4.816058159996863e-06,
      "loss": 0.5218,
      "step": 244
    },
    {
      "epoch": 1.1551724137931034,
      "grad_norm": 0.46563178300857544,
      "learning_rate": 4.813526893742972e-06,
      "loss": 0.5045,
      "step": 245
    },
    {
      "epoch": 1.1598746081504703,
      "grad_norm": 0.5262567400932312,
      "learning_rate": 4.810979004128924e-06,
      "loss": 0.4984,
      "step": 246
    },
    {
      "epoch": 1.164576802507837,
      "grad_norm": 0.4928685426712036,
      "learning_rate": 4.808414509462042e-06,
      "loss": 0.5183,
      "step": 247
    },
    {
      "epoch": 1.1692789968652038,
      "grad_norm": 0.8799586892127991,
      "learning_rate": 4.80583342816896e-06,
      "loss": 0.4851,
      "step": 248
    },
    {
      "epoch": 1.1739811912225706,
      "grad_norm": 0.5468968749046326,
      "learning_rate": 4.803235778795496e-06,
      "loss": 0.5242,
      "step": 249
    },
    {
      "epoch": 1.1786833855799372,
      "grad_norm": 0.49233874678611755,
      "learning_rate": 4.800621580006511e-06,
      "loss": 0.4687,
      "step": 250
    },
    {
      "epoch": 1.183385579937304,
      "grad_norm": 0.4705159366130829,
      "learning_rate": 4.797990850585782e-06,
      "loss": 0.5166,
      "step": 251
    },
    {
      "epoch": 1.188087774294671,
      "grad_norm": 0.49235984683036804,
      "learning_rate": 4.79534360943586e-06,
      "loss": 0.4964,
      "step": 252
    },
    {
      "epoch": 1.1927899686520376,
      "grad_norm": 0.5998479127883911,
      "learning_rate": 4.792679875577937e-06,
      "loss": 0.4816,
      "step": 253
    },
    {
      "epoch": 1.1974921630094044,
      "grad_norm": 0.6658573746681213,
      "learning_rate": 4.789999668151714e-06,
      "loss": 0.5157,
      "step": 254
    },
    {
      "epoch": 1.2021943573667713,
      "grad_norm": 0.47048208117485046,
      "learning_rate": 4.7873030064152545e-06,
      "loss": 0.4957,
      "step": 255
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 0.47910359501838684,
      "learning_rate": 4.784589909744856e-06,
      "loss": 0.4906,
      "step": 256
    },
    {
      "epoch": 1.2115987460815048,
      "grad_norm": 0.6204367876052856,
      "learning_rate": 4.7818603976349005e-06,
      "loss": 0.5032,
      "step": 257
    },
    {
      "epoch": 1.2163009404388714,
      "grad_norm": 0.48487767577171326,
      "learning_rate": 4.779114489697724e-06,
      "loss": 0.4986,
      "step": 258
    },
    {
      "epoch": 1.2210031347962382,
      "grad_norm": 0.5075157284736633,
      "learning_rate": 4.776352205663469e-06,
      "loss": 0.5021,
      "step": 259
    },
    {
      "epoch": 1.225705329153605,
      "grad_norm": 0.4977814257144928,
      "learning_rate": 4.773573565379947e-06,
      "loss": 0.5132,
      "step": 260
    },
    {
      "epoch": 1.2304075235109717,
      "grad_norm": 0.515428900718689,
      "learning_rate": 4.770778588812489e-06,
      "loss": 0.4783,
      "step": 261
    },
    {
      "epoch": 1.2351097178683386,
      "grad_norm": 0.5740492939949036,
      "learning_rate": 4.7679672960438135e-06,
      "loss": 0.505,
      "step": 262
    },
    {
      "epoch": 1.2398119122257054,
      "grad_norm": 0.46476778388023376,
      "learning_rate": 4.765139707273872e-06,
      "loss": 0.4925,
      "step": 263
    },
    {
      "epoch": 1.244514106583072,
      "grad_norm": 0.4873107373714447,
      "learning_rate": 4.762295842819707e-06,
      "loss": 0.5057,
      "step": 264
    },
    {
      "epoch": 1.249216300940439,
      "grad_norm": 0.5337681174278259,
      "learning_rate": 4.759435723115308e-06,
      "loss": 0.4709,
      "step": 265
    },
    {
      "epoch": 1.2539184952978055,
      "grad_norm": 0.5014109015464783,
      "learning_rate": 4.756559368711463e-06,
      "loss": 0.5074,
      "step": 266
    },
    {
      "epoch": 1.2586206896551724,
      "grad_norm": 0.4641067385673523,
      "learning_rate": 4.75366680027561e-06,
      "loss": 0.4882,
      "step": 267
    },
    {
      "epoch": 1.2633228840125392,
      "grad_norm": 0.5014533996582031,
      "learning_rate": 4.7507580385916906e-06,
      "loss": 0.4911,
      "step": 268
    },
    {
      "epoch": 1.2680250783699059,
      "grad_norm": 0.7282389402389526,
      "learning_rate": 4.747833104559999e-06,
      "loss": 0.4674,
      "step": 269
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.44998112320899963,
      "learning_rate": 4.744892019197033e-06,
      "loss": 0.4818,
      "step": 270
    },
    {
      "epoch": 1.2774294670846396,
      "grad_norm": 0.48868605494499207,
      "learning_rate": 4.74193480363534e-06,
      "loss": 0.4891,
      "step": 271
    },
    {
      "epoch": 1.2821316614420062,
      "grad_norm": 0.49108532071113586,
      "learning_rate": 4.738961479123373e-06,
      "loss": 0.497,
      "step": 272
    },
    {
      "epoch": 1.286833855799373,
      "grad_norm": 0.49707749485969543,
      "learning_rate": 4.735972067025326e-06,
      "loss": 0.5044,
      "step": 273
    },
    {
      "epoch": 1.29153605015674,
      "grad_norm": 0.5466123223304749,
      "learning_rate": 4.732966588820991e-06,
      "loss": 0.4966,
      "step": 274
    },
    {
      "epoch": 1.2962382445141065,
      "grad_norm": 0.4684564769268036,
      "learning_rate": 4.729945066105599e-06,
      "loss": 0.475,
      "step": 275
    },
    {
      "epoch": 1.3009404388714734,
      "grad_norm": 0.5143007636070251,
      "learning_rate": 4.726907520589664e-06,
      "loss": 0.4669,
      "step": 276
    },
    {
      "epoch": 1.3056426332288402,
      "grad_norm": 0.5112866163253784,
      "learning_rate": 4.72385397409883e-06,
      "loss": 0.5085,
      "step": 277
    },
    {
      "epoch": 1.3103448275862069,
      "grad_norm": 0.5432227849960327,
      "learning_rate": 4.720784448573712e-06,
      "loss": 0.5003,
      "step": 278
    },
    {
      "epoch": 1.3150470219435737,
      "grad_norm": 0.5111671686172485,
      "learning_rate": 4.717698966069739e-06,
      "loss": 0.5305,
      "step": 279
    },
    {
      "epoch": 1.3197492163009406,
      "grad_norm": 0.5037922859191895,
      "learning_rate": 4.7145975487569965e-06,
      "loss": 0.5081,
      "step": 280
    },
    {
      "epoch": 1.3244514106583072,
      "grad_norm": 0.7806075215339661,
      "learning_rate": 4.711480218920064e-06,
      "loss": 0.4698,
      "step": 281
    },
    {
      "epoch": 1.329153605015674,
      "grad_norm": 0.5198766589164734,
      "learning_rate": 4.708346998957859e-06,
      "loss": 0.5188,
      "step": 282
    },
    {
      "epoch": 1.3338557993730409,
      "grad_norm": 0.7277726531028748,
      "learning_rate": 4.705197911383473e-06,
      "loss": 0.4893,
      "step": 283
    },
    {
      "epoch": 1.3385579937304075,
      "grad_norm": 0.4980437457561493,
      "learning_rate": 4.7020329788240115e-06,
      "loss": 0.476,
      "step": 284
    },
    {
      "epoch": 1.3432601880877744,
      "grad_norm": 0.5318494439125061,
      "learning_rate": 4.6988522240204325e-06,
      "loss": 0.4853,
      "step": 285
    },
    {
      "epoch": 1.347962382445141,
      "grad_norm": 0.5113736391067505,
      "learning_rate": 4.695655669827377e-06,
      "loss": 0.5005,
      "step": 286
    },
    {
      "epoch": 1.3526645768025078,
      "grad_norm": 0.7522029876708984,
      "learning_rate": 4.6924433392130135e-06,
      "loss": 0.4924,
      "step": 287
    },
    {
      "epoch": 1.3573667711598745,
      "grad_norm": 0.4861883521080017,
      "learning_rate": 4.689215255258866e-06,
      "loss": 0.5099,
      "step": 288
    },
    {
      "epoch": 1.3620689655172413,
      "grad_norm": 0.46011000871658325,
      "learning_rate": 4.685971441159653e-06,
      "loss": 0.4792,
      "step": 289
    },
    {
      "epoch": 1.3667711598746082,
      "grad_norm": 0.5154278874397278,
      "learning_rate": 4.682711920223115e-06,
      "loss": 0.4781,
      "step": 290
    },
    {
      "epoch": 1.3714733542319748,
      "grad_norm": 0.45991218090057373,
      "learning_rate": 4.679436715869856e-06,
      "loss": 0.4989,
      "step": 291
    },
    {
      "epoch": 1.3761755485893417,
      "grad_norm": 0.49241772294044495,
      "learning_rate": 4.676145851633166e-06,
      "loss": 0.5159,
      "step": 292
    },
    {
      "epoch": 1.3808777429467085,
      "grad_norm": 0.523045003414154,
      "learning_rate": 4.672839351158856e-06,
      "loss": 0.5012,
      "step": 293
    },
    {
      "epoch": 1.3855799373040751,
      "grad_norm": 0.5598923563957214,
      "learning_rate": 4.669517238205089e-06,
      "loss": 0.4855,
      "step": 294
    },
    {
      "epoch": 1.390282131661442,
      "grad_norm": 0.507128894329071,
      "learning_rate": 4.666179536642208e-06,
      "loss": 0.4845,
      "step": 295
    },
    {
      "epoch": 1.3949843260188088,
      "grad_norm": 0.721315860748291,
      "learning_rate": 4.662826270452565e-06,
      "loss": 0.4817,
      "step": 296
    },
    {
      "epoch": 1.3996865203761755,
      "grad_norm": 0.560897171497345,
      "learning_rate": 4.659457463730347e-06,
      "loss": 0.4912,
      "step": 297
    },
    {
      "epoch": 1.4043887147335423,
      "grad_norm": 0.5121816396713257,
      "learning_rate": 4.6560731406814056e-06,
      "loss": 0.5061,
      "step": 298
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 0.4652915596961975,
      "learning_rate": 4.65267332562308e-06,
      "loss": 0.5088,
      "step": 299
    },
    {
      "epoch": 1.4137931034482758,
      "grad_norm": 0.5149025321006775,
      "learning_rate": 4.649258042984026e-06,
      "loss": 0.5071,
      "step": 300
    },
    {
      "epoch": 1.4184952978056427,
      "grad_norm": 0.4650140106678009,
      "learning_rate": 4.6458273173040395e-06,
      "loss": 0.4631,
      "step": 301
    },
    {
      "epoch": 1.4231974921630095,
      "grad_norm": 0.9061777591705322,
      "learning_rate": 4.642381173233874e-06,
      "loss": 0.5008,
      "step": 302
    },
    {
      "epoch": 1.4278996865203761,
      "grad_norm": 0.484729140996933,
      "learning_rate": 4.638919635535073e-06,
      "loss": 0.4564,
      "step": 303
    },
    {
      "epoch": 1.432601880877743,
      "grad_norm": 1.1061334609985352,
      "learning_rate": 4.635442729079788e-06,
      "loss": 0.4843,
      "step": 304
    },
    {
      "epoch": 1.4373040752351098,
      "grad_norm": 0.5028228163719177,
      "learning_rate": 4.6319504788505956e-06,
      "loss": 0.4794,
      "step": 305
    },
    {
      "epoch": 1.4420062695924765,
      "grad_norm": 0.49323561787605286,
      "learning_rate": 4.628442909940325e-06,
      "loss": 0.4901,
      "step": 306
    },
    {
      "epoch": 1.4467084639498433,
      "grad_norm": 0.5237039923667908,
      "learning_rate": 4.624920047551874e-06,
      "loss": 0.5079,
      "step": 307
    },
    {
      "epoch": 1.4514106583072102,
      "grad_norm": 0.5407230854034424,
      "learning_rate": 4.621381916998029e-06,
      "loss": 0.4742,
      "step": 308
    },
    {
      "epoch": 1.4561128526645768,
      "grad_norm": 0.4688400328159332,
      "learning_rate": 4.6178285437012806e-06,
      "loss": 0.5096,
      "step": 309
    },
    {
      "epoch": 1.4608150470219436,
      "grad_norm": 0.6071098446846008,
      "learning_rate": 4.6142599531936435e-06,
      "loss": 0.4721,
      "step": 310
    },
    {
      "epoch": 1.4655172413793103,
      "grad_norm": 0.47543954849243164,
      "learning_rate": 4.610676171116475e-06,
      "loss": 0.4924,
      "step": 311
    },
    {
      "epoch": 1.4702194357366771,
      "grad_norm": 0.47207143902778625,
      "learning_rate": 4.607077223220286e-06,
      "loss": 0.4965,
      "step": 312
    },
    {
      "epoch": 1.4749216300940438,
      "grad_norm": 0.5051419734954834,
      "learning_rate": 4.603463135364556e-06,
      "loss": 0.4671,
      "step": 313
    },
    {
      "epoch": 1.4796238244514106,
      "grad_norm": 0.5427581667900085,
      "learning_rate": 4.5998339335175555e-06,
      "loss": 0.4896,
      "step": 314
    },
    {
      "epoch": 1.4843260188087775,
      "grad_norm": 0.8648138642311096,
      "learning_rate": 4.596189643756147e-06,
      "loss": 0.4666,
      "step": 315
    },
    {
      "epoch": 1.489028213166144,
      "grad_norm": 0.5034400224685669,
      "learning_rate": 4.592530292265609e-06,
      "loss": 0.4849,
      "step": 316
    },
    {
      "epoch": 1.493730407523511,
      "grad_norm": 0.5227445960044861,
      "learning_rate": 4.58885590533944e-06,
      "loss": 0.4942,
      "step": 317
    },
    {
      "epoch": 1.4984326018808778,
      "grad_norm": 0.4640982151031494,
      "learning_rate": 4.585166509379173e-06,
      "loss": 0.5165,
      "step": 318
    },
    {
      "epoch": 1.5031347962382444,
      "grad_norm": 0.5094525814056396,
      "learning_rate": 4.581462130894186e-06,
      "loss": 0.4934,
      "step": 319
    },
    {
      "epoch": 1.5078369905956113,
      "grad_norm": 0.5064478516578674,
      "learning_rate": 4.57774279650151e-06,
      "loss": 0.4847,
      "step": 320
    },
    {
      "epoch": 1.5125391849529781,
      "grad_norm": 0.5131570100784302,
      "learning_rate": 4.574008532925638e-06,
      "loss": 0.5111,
      "step": 321
    },
    {
      "epoch": 1.5172413793103448,
      "grad_norm": 0.5142849087715149,
      "learning_rate": 4.570259366998336e-06,
      "loss": 0.4953,
      "step": 322
    },
    {
      "epoch": 1.5219435736677116,
      "grad_norm": 0.4943976402282715,
      "learning_rate": 4.566495325658445e-06,
      "loss": 0.5235,
      "step": 323
    },
    {
      "epoch": 1.5266457680250785,
      "grad_norm": 0.46294504404067993,
      "learning_rate": 4.5627164359516915e-06,
      "loss": 0.505,
      "step": 324
    },
    {
      "epoch": 1.531347962382445,
      "grad_norm": 0.6234234571456909,
      "learning_rate": 4.558922725030491e-06,
      "loss": 0.4776,
      "step": 325
    },
    {
      "epoch": 1.536050156739812,
      "grad_norm": 0.5395891666412354,
      "learning_rate": 4.555114220153755e-06,
      "loss": 0.4313,
      "step": 326
    },
    {
      "epoch": 1.5407523510971788,
      "grad_norm": 0.8332454562187195,
      "learning_rate": 4.551290948686693e-06,
      "loss": 0.5141,
      "step": 327
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.4640675485134125,
      "learning_rate": 4.547452938100615e-06,
      "loss": 0.5178,
      "step": 328
    },
    {
      "epoch": 1.5501567398119123,
      "grad_norm": 0.48269912600517273,
      "learning_rate": 4.54360021597274e-06,
      "loss": 0.4955,
      "step": 329
    },
    {
      "epoch": 1.5548589341692791,
      "grad_norm": 0.5193467140197754,
      "learning_rate": 4.539732809985989e-06,
      "loss": 0.4864,
      "step": 330
    },
    {
      "epoch": 1.5595611285266457,
      "grad_norm": 0.4845351576805115,
      "learning_rate": 4.535850747928796e-06,
      "loss": 0.5003,
      "step": 331
    },
    {
      "epoch": 1.5642633228840124,
      "grad_norm": 0.5264540314674377,
      "learning_rate": 4.531954057694897e-06,
      "loss": 0.4828,
      "step": 332
    },
    {
      "epoch": 1.5689655172413794,
      "grad_norm": 0.4795820415019989,
      "learning_rate": 4.5280427672831414e-06,
      "loss": 0.4904,
      "step": 333
    },
    {
      "epoch": 1.573667711598746,
      "grad_norm": 0.5169357061386108,
      "learning_rate": 4.524116904797281e-06,
      "loss": 0.4661,
      "step": 334
    },
    {
      "epoch": 1.5783699059561127,
      "grad_norm": 0.4765758514404297,
      "learning_rate": 4.520176498445774e-06,
      "loss": 0.4793,
      "step": 335
    },
    {
      "epoch": 1.5830721003134798,
      "grad_norm": 0.5529272556304932,
      "learning_rate": 4.516221576541581e-06,
      "loss": 0.4793,
      "step": 336
    },
    {
      "epoch": 1.5877742946708464,
      "grad_norm": 0.594944179058075,
      "learning_rate": 4.512252167501959e-06,
      "loss": 0.4786,
      "step": 337
    },
    {
      "epoch": 1.592476489028213,
      "grad_norm": 0.4921863079071045,
      "learning_rate": 4.508268299848262e-06,
      "loss": 0.4854,
      "step": 338
    },
    {
      "epoch": 1.59717868338558,
      "grad_norm": 0.4983494281768799,
      "learning_rate": 4.50427000220573e-06,
      "loss": 0.4999,
      "step": 339
    },
    {
      "epoch": 1.6018808777429467,
      "grad_norm": 0.48530513048171997,
      "learning_rate": 4.50025730330329e-06,
      "loss": 0.477,
      "step": 340
    },
    {
      "epoch": 1.6065830721003134,
      "grad_norm": 0.47819897532463074,
      "learning_rate": 4.4962302319733445e-06,
      "loss": 0.4933,
      "step": 341
    },
    {
      "epoch": 1.6112852664576802,
      "grad_norm": 0.608711838722229,
      "learning_rate": 4.492188817151565e-06,
      "loss": 0.5285,
      "step": 342
    },
    {
      "epoch": 1.615987460815047,
      "grad_norm": 0.5040555000305176,
      "learning_rate": 4.488133087876688e-06,
      "loss": 0.467,
      "step": 343
    },
    {
      "epoch": 1.6206896551724137,
      "grad_norm": 0.4925503432750702,
      "learning_rate": 4.484063073290301e-06,
      "loss": 0.4651,
      "step": 344
    },
    {
      "epoch": 1.6253918495297806,
      "grad_norm": 0.5364562273025513,
      "learning_rate": 4.479978802636637e-06,
      "loss": 0.5002,
      "step": 345
    },
    {
      "epoch": 1.6300940438871474,
      "grad_norm": 0.5685595870018005,
      "learning_rate": 4.475880305262362e-06,
      "loss": 0.504,
      "step": 346
    },
    {
      "epoch": 1.634796238244514,
      "grad_norm": 0.4894132912158966,
      "learning_rate": 4.471767610616366e-06,
      "loss": 0.4961,
      "step": 347
    },
    {
      "epoch": 1.6394984326018809,
      "grad_norm": 0.5791006088256836,
      "learning_rate": 4.467640748249549e-06,
      "loss": 0.4706,
      "step": 348
    },
    {
      "epoch": 1.6442006269592477,
      "grad_norm": 0.4814227521419525,
      "learning_rate": 4.4634997478146125e-06,
      "loss": 0.4895,
      "step": 349
    },
    {
      "epoch": 1.6489028213166144,
      "grad_norm": 0.4842629134654999,
      "learning_rate": 4.459344639065842e-06,
      "loss": 0.482,
      "step": 350
    },
    {
      "epoch": 1.6536050156739812,
      "grad_norm": 0.44065847992897034,
      "learning_rate": 4.455175451858897e-06,
      "loss": 0.4912,
      "step": 351
    },
    {
      "epoch": 1.658307210031348,
      "grad_norm": 0.5212785601615906,
      "learning_rate": 4.450992216150592e-06,
      "loss": 0.4995,
      "step": 352
    },
    {
      "epoch": 1.6630094043887147,
      "grad_norm": 0.6393409967422485,
      "learning_rate": 4.446794961998689e-06,
      "loss": 0.4675,
      "step": 353
    },
    {
      "epoch": 1.6677115987460815,
      "grad_norm": 0.4786463975906372,
      "learning_rate": 4.442583719561671e-06,
      "loss": 0.4938,
      "step": 354
    },
    {
      "epoch": 1.6724137931034484,
      "grad_norm": 0.5183210968971252,
      "learning_rate": 4.438358519098536e-06,
      "loss": 0.5032,
      "step": 355
    },
    {
      "epoch": 1.677115987460815,
      "grad_norm": 0.7055342197418213,
      "learning_rate": 4.4341193909685685e-06,
      "loss": 0.4861,
      "step": 356
    },
    {
      "epoch": 1.6818181818181817,
      "grad_norm": 0.4798656105995178,
      "learning_rate": 4.429866365631134e-06,
      "loss": 0.4917,
      "step": 357
    },
    {
      "epoch": 1.6865203761755487,
      "grad_norm": 0.5035173296928406,
      "learning_rate": 4.425599473645447e-06,
      "loss": 0.4834,
      "step": 358
    },
    {
      "epoch": 1.6912225705329154,
      "grad_norm": 0.5660416483879089,
      "learning_rate": 4.421318745670364e-06,
      "loss": 0.4829,
      "step": 359
    },
    {
      "epoch": 1.695924764890282,
      "grad_norm": 0.7084828019142151,
      "learning_rate": 4.4170242124641524e-06,
      "loss": 0.4603,
      "step": 360
    },
    {
      "epoch": 1.700626959247649,
      "grad_norm": 0.4513323903083801,
      "learning_rate": 4.412715904884277e-06,
      "loss": 0.4896,
      "step": 361
    },
    {
      "epoch": 1.7053291536050157,
      "grad_norm": 0.48268529772758484,
      "learning_rate": 4.4083938538871735e-06,
      "loss": 0.4684,
      "step": 362
    },
    {
      "epoch": 1.7100313479623823,
      "grad_norm": 0.4861423671245575,
      "learning_rate": 4.4040580905280295e-06,
      "loss": 0.4878,
      "step": 363
    },
    {
      "epoch": 1.7147335423197492,
      "grad_norm": 0.48606055974960327,
      "learning_rate": 4.3997086459605586e-06,
      "loss": 0.4849,
      "step": 364
    },
    {
      "epoch": 1.719435736677116,
      "grad_norm": 0.4912388026714325,
      "learning_rate": 4.395345551436779e-06,
      "loss": 0.509,
      "step": 365
    },
    {
      "epoch": 1.7241379310344827,
      "grad_norm": 0.47986406087875366,
      "learning_rate": 4.390968838306788e-06,
      "loss": 0.4635,
      "step": 366
    },
    {
      "epoch": 1.7288401253918495,
      "grad_norm": 0.5145572423934937,
      "learning_rate": 4.386578538018535e-06,
      "loss": 0.4631,
      "step": 367
    },
    {
      "epoch": 1.7335423197492164,
      "grad_norm": 0.5538941621780396,
      "learning_rate": 4.382174682117598e-06,
      "loss": 0.5091,
      "step": 368
    },
    {
      "epoch": 1.738244514106583,
      "grad_norm": 0.49918535351753235,
      "learning_rate": 4.377757302246956e-06,
      "loss": 0.4454,
      "step": 369
    },
    {
      "epoch": 1.7429467084639498,
      "grad_norm": 0.48130425810813904,
      "learning_rate": 4.373326430146762e-06,
      "loss": 0.5011,
      "step": 370
    },
    {
      "epoch": 1.7476489028213167,
      "grad_norm": 0.8833332657814026,
      "learning_rate": 4.368882097654113e-06,
      "loss": 0.4961,
      "step": 371
    },
    {
      "epoch": 1.7523510971786833,
      "grad_norm": 0.45491841435432434,
      "learning_rate": 4.364424336702825e-06,
      "loss": 0.4708,
      "step": 372
    },
    {
      "epoch": 1.7570532915360502,
      "grad_norm": 0.6433991193771362,
      "learning_rate": 4.3599531793232e-06,
      "loss": 0.488,
      "step": 373
    },
    {
|
"epoch": 1.761755485893417, |
|
"grad_norm": 0.49767547845840454, |
|
"learning_rate": 4.355468657641797e-06, |
|
"loss": 0.4836, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.7664576802507836, |
|
"grad_norm": 0.5577245950698853, |
|
"learning_rate": 4.3509708038812035e-06, |
|
"loss": 0.4879, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.7711598746081505, |
|
"grad_norm": 0.4875679314136505, |
|
"learning_rate": 4.346459650359798e-06, |
|
"loss": 0.4858, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.7758620689655173, |
|
"grad_norm": 0.47319531440734863, |
|
"learning_rate": 4.341935229491525e-06, |
|
"loss": 0.4554, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.780564263322884, |
|
"grad_norm": 0.7505528330802917, |
|
"learning_rate": 4.337397573785659e-06, |
|
"loss": 0.5043, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.7852664576802508, |
|
"grad_norm": 0.49199047684669495, |
|
"learning_rate": 4.332846715846566e-06, |
|
"loss": 0.4712, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.7899686520376177, |
|
"grad_norm": 0.5161461234092712, |
|
"learning_rate": 4.328282688373479e-06, |
|
"loss": 0.4927, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.7946708463949843, |
|
"grad_norm": 0.5262524485588074, |
|
"learning_rate": 4.323705524160258e-06, |
|
"loss": 0.4904, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.799373040752351, |
|
"grad_norm": 0.5441488027572632, |
|
"learning_rate": 4.319115256095149e-06, |
|
"loss": 0.4662, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.804075235109718, |
|
"grad_norm": 0.6795123815536499, |
|
"learning_rate": 4.314511917160557e-06, |
|
"loss": 0.4681, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.8087774294670846, |
|
"grad_norm": 0.47761398553848267, |
|
"learning_rate": 4.3098955404328045e-06, |
|
"loss": 0.4611, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.8134796238244513, |
|
"grad_norm": 0.7907284498214722, |
|
"learning_rate": 4.305266159081895e-06, |
|
"loss": 0.4826, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.52130126953125, |
|
"learning_rate": 4.3006238063712725e-06, |
|
"loss": 0.4671, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.822884012539185, |
|
"grad_norm": 0.8447866439819336, |
|
"learning_rate": 4.295968515657583e-06, |
|
"loss": 0.5014, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.8275862068965516, |
|
"grad_norm": 0.4711958169937134, |
|
"learning_rate": 4.29130032039044e-06, |
|
"loss": 0.4844, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.8322884012539185, |
|
"grad_norm": 0.5012089610099792, |
|
"learning_rate": 4.2866192541121755e-06, |
|
"loss": 0.4747, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.8369905956112853, |
|
"grad_norm": 0.5260939598083496, |
|
"learning_rate": 4.281925350457606e-06, |
|
"loss": 0.4778, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.841692789968652, |
|
"grad_norm": 0.4744386672973633, |
|
"learning_rate": 4.277218643153787e-06, |
|
"loss": 0.4795, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.8463949843260188, |
|
"grad_norm": 0.4848107695579529, |
|
"learning_rate": 4.272499166019771e-06, |
|
"loss": 0.4783, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.8510971786833856, |
|
"grad_norm": 0.48980602622032166, |
|
"learning_rate": 4.267766952966369e-06, |
|
"loss": 0.4665, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.8557993730407523, |
|
"grad_norm": 0.49879398941993713, |
|
"learning_rate": 4.2630220379959006e-06, |
|
"loss": 0.444, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.8605015673981191, |
|
"grad_norm": 0.5382682681083679, |
|
"learning_rate": 4.258264455201953e-06, |
|
"loss": 0.466, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.865203761755486, |
|
"grad_norm": 0.47293514013290405, |
|
"learning_rate": 4.2534942387691335e-06, |
|
"loss": 0.4915, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.8699059561128526, |
|
"grad_norm": 0.4993828237056732, |
|
"learning_rate": 4.248711422972829e-06, |
|
"loss": 0.4771, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.8746081504702194, |
|
"grad_norm": 0.4950089156627655, |
|
"learning_rate": 4.243916042178954e-06, |
|
"loss": 0.4614, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.8793103448275863, |
|
"grad_norm": 0.48008930683135986, |
|
"learning_rate": 4.239108130843709e-06, |
|
"loss": 0.4684, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.884012539184953, |
|
"grad_norm": 0.5422408580780029, |
|
"learning_rate": 4.234287723513326e-06, |
|
"loss": 0.4933, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.8887147335423198, |
|
"grad_norm": 0.5405403971672058, |
|
"learning_rate": 4.229454854823827e-06, |
|
"loss": 0.4912, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.8934169278996866, |
|
"grad_norm": 0.6024321913719177, |
|
"learning_rate": 4.224609559500772e-06, |
|
"loss": 0.504, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.8981191222570533, |
|
"grad_norm": 0.4791870415210724, |
|
"learning_rate": 4.21975187235901e-06, |
|
"loss": 0.4547, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.90282131661442, |
|
"grad_norm": 0.9783840775489807, |
|
"learning_rate": 4.21488182830243e-06, |
|
"loss": 0.483, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.907523510971787, |
|
"grad_norm": 1.0899996757507324, |
|
"learning_rate": 4.209999462323706e-06, |
|
"loss": 0.4603, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.9122257053291536, |
|
"grad_norm": 0.6221780180931091, |
|
"learning_rate": 4.20510480950405e-06, |
|
"loss": 0.4926, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.9169278996865202, |
|
"grad_norm": 0.671617329120636, |
|
"learning_rate": 4.200197905012961e-06, |
|
"loss": 0.4536, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.9216300940438873, |
|
"grad_norm": 0.48824113607406616, |
|
"learning_rate": 4.195278784107965e-06, |
|
"loss": 0.4709, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.926332288401254, |
|
"grad_norm": 0.4700049161911011, |
|
"learning_rate": 4.19034748213437e-06, |
|
"loss": 0.4721, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.9310344827586206, |
|
"grad_norm": 0.5228457450866699, |
|
"learning_rate": 4.185404034525008e-06, |
|
"loss": 0.4644, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.9357366771159876, |
|
"grad_norm": 0.5192087888717651, |
|
"learning_rate": 4.180448476799981e-06, |
|
"loss": 0.5018, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.9404388714733543, |
|
"grad_norm": 0.49069511890411377, |
|
"learning_rate": 4.175480844566404e-06, |
|
"loss": 0.4747, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.9451410658307209, |
|
"grad_norm": 0.45442187786102295, |
|
"learning_rate": 4.170501173518152e-06, |
|
"loss": 0.4683, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.9498432601880877, |
|
"grad_norm": 0.5145336985588074, |
|
"learning_rate": 4.165509499435604e-06, |
|
"loss": 0.4677, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.9545454545454546, |
|
"grad_norm": 0.514231264591217, |
|
"learning_rate": 4.16050585818538e-06, |
|
"loss": 0.4812, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.9592476489028212, |
|
"grad_norm": 0.5052164793014526, |
|
"learning_rate": 4.155490285720092e-06, |
|
"loss": 0.5056, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.963949843260188, |
|
"grad_norm": 0.5336954593658447, |
|
"learning_rate": 4.150462818078079e-06, |
|
"loss": 0.4916, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.968652037617555, |
|
"grad_norm": 0.5100321769714355, |
|
"learning_rate": 4.145423491383153e-06, |
|
"loss": 0.4869, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.9733542319749215, |
|
"grad_norm": 0.5358601212501526, |
|
"learning_rate": 4.14037234184433e-06, |
|
"loss": 0.5041, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.9780564263322884, |
|
"grad_norm": 0.4950590133666992, |
|
"learning_rate": 4.135309405755583e-06, |
|
"loss": 0.4889, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.9827586206896552, |
|
"grad_norm": 0.5121294856071472, |
|
"learning_rate": 4.130234719495574e-06, |
|
"loss": 0.4736, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.9874608150470219, |
|
"grad_norm": 0.5014020800590515, |
|
"learning_rate": 4.125148319527391e-06, |
|
"loss": 0.4635, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.9921630094043887, |
|
"grad_norm": 0.5898067355155945, |
|
"learning_rate": 4.1200502423982904e-06, |
|
"loss": 0.4842, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.9968652037617556, |
|
"grad_norm": 0.535094141960144, |
|
"learning_rate": 4.1149405247394295e-06, |
|
"loss": 0.4885, |
|
"step": 424 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1272, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 6, |
|
"save_steps": 212, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.7298140993196392e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|