{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6666666666666666,
  "eval_steps": 500,
  "global_step": 314,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0021231422505307855,
      "grad_norm": 6.233692311689662,
      "learning_rate": 0.0,
      "loss": 1.3677,
      "step": 1
    },
    {
      "epoch": 0.004246284501061571,
      "grad_norm": 6.0364996372518975,
      "learning_rate": 1.2499999999999999e-06,
      "loss": 1.4092,
      "step": 2
    },
    {
      "epoch": 0.006369426751592357,
      "grad_norm": 5.824376154111525,
      "learning_rate": 2.4999999999999998e-06,
      "loss": 1.3994,
      "step": 3
    },
    {
      "epoch": 0.008492569002123142,
      "grad_norm": 5.902606326788258,
      "learning_rate": 3.75e-06,
      "loss": 1.3973,
      "step": 4
    },
    {
      "epoch": 0.010615711252653927,
      "grad_norm": 5.275994860996141,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 1.3653,
      "step": 5
    },
    {
      "epoch": 0.012738853503184714,
      "grad_norm": 4.1623214153554295,
      "learning_rate": 6.25e-06,
      "loss": 1.3567,
      "step": 6
    },
    {
      "epoch": 0.014861995753715499,
      "grad_norm": 3.3671066127082088,
      "learning_rate": 7.5e-06,
      "loss": 1.2129,
      "step": 7
    },
    {
      "epoch": 0.016985138004246284,
      "grad_norm": 2.97534257184618,
      "learning_rate": 8.750000000000001e-06,
      "loss": 1.2794,
      "step": 8
    },
    {
      "epoch": 0.01910828025477707,
      "grad_norm": 2.9074617656297654,
      "learning_rate": 9.999999999999999e-06,
      "loss": 1.2212,
      "step": 9
    },
    {
      "epoch": 0.021231422505307854,
      "grad_norm": 3.829816734428447,
      "learning_rate": 1.125e-05,
      "loss": 1.2232,
      "step": 10
    },
    {
      "epoch": 0.02335456475583864,
      "grad_norm": 2.710794346051666,
      "learning_rate": 1.25e-05,
      "loss": 1.1805,
      "step": 11
    },
    {
      "epoch": 0.025477707006369428,
      "grad_norm": 2.490537571865269,
      "learning_rate": 1.375e-05,
      "loss": 1.1395,
      "step": 12
    },
    {
      "epoch": 0.027600849256900213,
      "grad_norm": 2.958343893823127,
      "learning_rate": 1.5e-05,
      "loss": 1.1996,
      "step": 13
    },
    {
      "epoch": 0.029723991507430998,
      "grad_norm": 2.10711525179354,
      "learning_rate": 1.625e-05,
      "loss": 1.1154,
      "step": 14
    },
    {
      "epoch": 0.03184713375796178,
      "grad_norm": 1.804171701556794,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 1.1335,
      "step": 15
    },
    {
      "epoch": 0.03397027600849257,
      "grad_norm": 1.3531726745218633,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.0982,
      "step": 16
    },
    {
      "epoch": 0.036093418259023353,
      "grad_norm": 1.2822932286416207,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 1.0626,
      "step": 17
    },
    {
      "epoch": 0.03821656050955414,
      "grad_norm": 1.2087880979605115,
      "learning_rate": 2.125e-05,
      "loss": 1.0372,
      "step": 18
    },
    {
      "epoch": 0.040339702760084924,
      "grad_norm": 1.2492451284743782,
      "learning_rate": 2.25e-05,
      "loss": 1.0343,
      "step": 19
    },
    {
      "epoch": 0.04246284501061571,
      "grad_norm": 1.10733509517002,
      "learning_rate": 2.3749999999999998e-05,
      "loss": 0.9442,
      "step": 20
    },
    {
      "epoch": 0.044585987261146494,
      "grad_norm": 1.0490035536866515,
      "learning_rate": 2.5e-05,
      "loss": 1.0626,
      "step": 21
    },
    {
      "epoch": 0.04670912951167728,
      "grad_norm": 0.9466743921266996,
      "learning_rate": 2.625e-05,
      "loss": 1.0386,
      "step": 22
    },
    {
      "epoch": 0.04883227176220807,
      "grad_norm": 0.9081034267931447,
      "learning_rate": 2.75e-05,
      "loss": 0.9487,
      "step": 23
    },
    {
      "epoch": 0.050955414012738856,
      "grad_norm": 1.0372847960112026,
      "learning_rate": 2.875e-05,
      "loss": 0.9728,
      "step": 24
    },
    {
      "epoch": 0.05307855626326964,
      "grad_norm": 1.198809452316821,
      "learning_rate": 3e-05,
      "loss": 1.0076,
      "step": 25
    },
    {
      "epoch": 0.055201698513800426,
      "grad_norm": 1.1303411718578629,
      "learning_rate": 2.9999629537566803e-05,
      "loss": 1.0052,
      "step": 26
    },
    {
      "epoch": 0.05732484076433121,
      "grad_norm": 0.9253246602948907,
      "learning_rate": 2.9998518168566207e-05,
      "loss": 1.0535,
      "step": 27
    },
    {
      "epoch": 0.059447983014861996,
      "grad_norm": 0.8916584736775167,
      "learning_rate": 2.999666594789427e-05,
      "loss": 1.0575,
      "step": 28
    },
    {
      "epoch": 0.06157112526539278,
      "grad_norm": 1.017365856913298,
      "learning_rate": 2.999407296704142e-05,
      "loss": 1.0276,
      "step": 29
    },
    {
      "epoch": 0.06369426751592357,
      "grad_norm": 0.9179032077950415,
      "learning_rate": 2.9990739354087918e-05,
      "loss": 1.0334,
      "step": 30
    },
    {
      "epoch": 0.06581740976645435,
      "grad_norm": 0.9094459507546309,
      "learning_rate": 2.9986665273697548e-05,
      "loss": 1.0191,
      "step": 31
    },
    {
      "epoch": 0.06794055201698514,
      "grad_norm": 0.8328888734598474,
      "learning_rate": 2.9981850927109472e-05,
      "loss": 1.0266,
      "step": 32
    },
    {
      "epoch": 0.07006369426751592,
      "grad_norm": 0.9265287035853829,
      "learning_rate": 2.9976296552128305e-05,
      "loss": 0.9839,
      "step": 33
    },
    {
      "epoch": 0.07218683651804671,
      "grad_norm": 0.8386673953378151,
      "learning_rate": 2.9970002423112342e-05,
      "loss": 0.9856,
      "step": 34
    },
    {
      "epoch": 0.07430997876857749,
      "grad_norm": 0.8773223216488372,
      "learning_rate": 2.996296885096003e-05,
      "loss": 0.9913,
      "step": 35
    },
    {
      "epoch": 0.07643312101910828,
      "grad_norm": 1.9920269675167115,
      "learning_rate": 2.9955196183094604e-05,
      "loss": 1.0276,
      "step": 36
    },
    {
      "epoch": 0.07855626326963906,
      "grad_norm": 0.8541287848004893,
      "learning_rate": 2.9946684803446928e-05,
      "loss": 0.9692,
      "step": 37
    },
    {
      "epoch": 0.08067940552016985,
      "grad_norm": 0.8475286973659103,
      "learning_rate": 2.9937435132436517e-05,
      "loss": 0.9648,
      "step": 38
    },
    {
      "epoch": 0.08280254777070063,
      "grad_norm": 0.891470065700579,
      "learning_rate": 2.9927447626950795e-05,
      "loss": 0.9469,
      "step": 39
    },
    {
      "epoch": 0.08492569002123142,
      "grad_norm": 0.918643932520895,
      "learning_rate": 2.9916722780322504e-05,
      "loss": 1.0,
      "step": 40
    },
    {
      "epoch": 0.0870488322717622,
      "grad_norm": 0.89952358144301,
      "learning_rate": 2.9905261122305344e-05,
      "loss": 0.9591,
      "step": 41
    },
    {
      "epoch": 0.08917197452229299,
      "grad_norm": 0.9534501125480317,
      "learning_rate": 2.9893063219047815e-05,
      "loss": 0.9638,
      "step": 42
    },
    {
      "epoch": 0.09129511677282377,
      "grad_norm": 0.9410259599712866,
      "learning_rate": 2.988012967306524e-05,
      "loss": 0.9631,
      "step": 43
    },
    {
      "epoch": 0.09341825902335456,
      "grad_norm": 0.7903964300371441,
      "learning_rate": 2.9866461123210005e-05,
      "loss": 1.0352,
      "step": 44
    },
    {
      "epoch": 0.09554140127388536,
      "grad_norm": 0.8186703306518206,
      "learning_rate": 2.985205824464001e-05,
      "loss": 1.035,
      "step": 45
    },
    {
      "epoch": 0.09766454352441614,
      "grad_norm": 1.1828963478090526,
      "learning_rate": 2.983692174878531e-05,
      "loss": 1.0224,
      "step": 46
    },
    {
      "epoch": 0.09978768577494693,
      "grad_norm": 1.0653437359039768,
      "learning_rate": 2.9821052383312987e-05,
      "loss": 0.9845,
      "step": 47
    },
    {
      "epoch": 0.10191082802547771,
      "grad_norm": 0.9139888799070773,
      "learning_rate": 2.980445093209021e-05,
      "loss": 0.9882,
      "step": 48
    },
    {
      "epoch": 0.1040339702760085,
      "grad_norm": 1.0047461469584085,
      "learning_rate": 2.9787118215145502e-05,
      "loss": 0.9707,
      "step": 49
    },
    {
      "epoch": 0.10615711252653928,
      "grad_norm": 0.9419818838483188,
      "learning_rate": 2.976905508862828e-05,
      "loss": 1.0093,
      "step": 50
    },
    {
      "epoch": 0.10828025477707007,
      "grad_norm": 0.8165141706193597,
      "learning_rate": 2.9750262444766502e-05,
      "loss": 0.9982,
      "step": 51
    },
    {
      "epoch": 0.11040339702760085,
      "grad_norm": 0.9276179840515267,
      "learning_rate": 2.9730741211822654e-05,
      "loss": 1.0062,
      "step": 52
    },
    {
      "epoch": 0.11252653927813164,
      "grad_norm": 0.7769869987656481,
      "learning_rate": 2.9710492354047857e-05,
      "loss": 0.9344,
      "step": 53
    },
    {
      "epoch": 0.11464968152866242,
      "grad_norm": 0.8691988768513328,
      "learning_rate": 2.968951687163426e-05,
      "loss": 0.9953,
      "step": 54
    },
    {
      "epoch": 0.11677282377919321,
      "grad_norm": 0.8209469205962703,
      "learning_rate": 2.9667815800665637e-05,
      "loss": 1.0306,
      "step": 55
    },
    {
      "epoch": 0.11889596602972399,
      "grad_norm": 0.7477038589692927,
      "learning_rate": 2.9645390213066193e-05,
      "loss": 1.0604,
      "step": 56
    },
    {
      "epoch": 0.12101910828025478,
      "grad_norm": 0.8205605966500025,
      "learning_rate": 2.9622241216547623e-05,
      "loss": 0.9994,
      "step": 57
    },
    {
      "epoch": 0.12314225053078556,
      "grad_norm": 0.8004707255796194,
      "learning_rate": 2.9598369954554405e-05,
      "loss": 0.9995,
      "step": 58
    },
    {
      "epoch": 0.12526539278131635,
      "grad_norm": 0.7081299100114782,
      "learning_rate": 2.957377760620732e-05,
      "loss": 0.9943,
      "step": 59
    },
    {
      "epoch": 0.12738853503184713,
      "grad_norm": 0.7217649291059394,
      "learning_rate": 2.9548465386245185e-05,
      "loss": 0.987,
      "step": 60
    },
    {
      "epoch": 0.12951167728237792,
      "grad_norm": 0.8205996950485613,
      "learning_rate": 2.952243454496488e-05,
      "loss": 0.9136,
      "step": 61
    },
    {
      "epoch": 0.1316348195329087,
      "grad_norm": 0.7201961172816332,
      "learning_rate": 2.9495686368159592e-05,
      "loss": 0.976,
      "step": 62
    },
    {
      "epoch": 0.1337579617834395,
      "grad_norm": 0.7696946636431424,
      "learning_rate": 2.946822217705526e-05,
      "loss": 0.9993,
      "step": 63
    },
    {
      "epoch": 0.13588110403397027,
      "grad_norm": 0.7848756346253257,
      "learning_rate": 2.9440043328245366e-05,
      "loss": 1.0206,
      "step": 64
    },
    {
      "epoch": 0.13800424628450106,
      "grad_norm": 0.740850768759331,
      "learning_rate": 2.9411151213623894e-05,
      "loss": 1.0311,
      "step": 65
    },
    {
      "epoch": 0.14012738853503184,
      "grad_norm": 0.7028782098692511,
      "learning_rate": 2.938154726031659e-05,
      "loss": 0.9681,
      "step": 66
    },
    {
      "epoch": 0.14225053078556263,
      "grad_norm": 0.7087905746550794,
      "learning_rate": 2.9351232930610473e-05,
      "loss": 0.975,
      "step": 67
    },
    {
      "epoch": 0.14437367303609341,
      "grad_norm": 0.7512258835012833,
      "learning_rate": 2.932020972188157e-05,
      "loss": 0.9703,
      "step": 68
    },
    {
      "epoch": 0.1464968152866242,
      "grad_norm": 0.6741016163735575,
      "learning_rate": 2.9288479166521014e-05,
      "loss": 0.9495,
      "step": 69
    },
    {
      "epoch": 0.14861995753715498,
      "grad_norm": 0.740748414113653,
      "learning_rate": 2.9256042831859295e-05,
      "loss": 1.0603,
      "step": 70
    },
    {
      "epoch": 0.15074309978768577,
      "grad_norm": 0.7918692932011524,
      "learning_rate": 2.9222902320088882e-05,
      "loss": 1.0028,
      "step": 71
    },
    {
      "epoch": 0.15286624203821655,
      "grad_norm": 0.759432829778867,
      "learning_rate": 2.9189059268185057e-05,
      "loss": 1.0205,
      "step": 72
    },
    {
      "epoch": 0.15498938428874734,
      "grad_norm": 0.7025218211620403,
      "learning_rate": 2.9154515347825065e-05,
      "loss": 0.9696,
      "step": 73
    },
    {
      "epoch": 0.15711252653927812,
      "grad_norm": 0.8396244762201516,
      "learning_rate": 2.9119272265305546e-05,
      "loss": 0.9472,
      "step": 74
    },
    {
      "epoch": 0.1592356687898089,
      "grad_norm": 0.7538155851518231,
      "learning_rate": 2.9083331761458247e-05,
      "loss": 0.992,
      "step": 75
    },
    {
      "epoch": 0.1613588110403397,
      "grad_norm": 0.8141973691781073,
      "learning_rate": 2.904669561156404e-05,
      "loss": 0.9097,
      "step": 76
    },
    {
      "epoch": 0.16348195329087048,
      "grad_norm": 0.6587425676342493,
      "learning_rate": 2.9009365625265215e-05,
      "loss": 0.9604,
      "step": 77
    },
    {
      "epoch": 0.16560509554140126,
      "grad_norm": 0.7407423574470945,
      "learning_rate": 2.8971343646476114e-05,
      "loss": 0.9779,
      "step": 78
    },
    {
      "epoch": 0.16772823779193205,
      "grad_norm": 0.8004547291076868,
      "learning_rate": 2.893263155329204e-05,
      "loss": 0.9766,
      "step": 79
    },
    {
      "epoch": 0.16985138004246284,
      "grad_norm": 0.7457062213618112,
      "learning_rate": 2.8893231257896502e-05,
      "loss": 1.0744,
      "step": 80
    },
    {
      "epoch": 0.17197452229299362,
      "grad_norm": 0.7225596471074269,
      "learning_rate": 2.8853144706466725e-05,
      "loss": 1.0006,
      "step": 81
    },
    {
      "epoch": 0.1740976645435244,
      "grad_norm": 0.9234525636361333,
      "learning_rate": 2.881237387907757e-05,
      "loss": 1.0595,
      "step": 82
    },
    {
      "epoch": 0.1762208067940552,
      "grad_norm": 0.7277875089674879,
      "learning_rate": 2.8770920789603687e-05,
      "loss": 0.9764,
      "step": 83
    },
    {
      "epoch": 0.17834394904458598,
      "grad_norm": 0.7251182820248202,
      "learning_rate": 2.8728787485620068e-05,
      "loss": 1.0087,
      "step": 84
    },
    {
      "epoch": 0.18046709129511676,
      "grad_norm": 0.7259248063014598,
      "learning_rate": 2.868597604830088e-05,
      "loss": 0.9895,
      "step": 85
    },
    {
      "epoch": 0.18259023354564755,
      "grad_norm": 0.7236366037154788,
      "learning_rate": 2.864248859231669e-05,
      "loss": 1.0186,
      "step": 86
    },
    {
      "epoch": 0.18471337579617833,
      "grad_norm": 0.7104922685407246,
      "learning_rate": 2.859832726573e-05,
      "loss": 1.0179,
      "step": 87
    },
    {
      "epoch": 0.18683651804670912,
      "grad_norm": 0.6494155954698466,
      "learning_rate": 2.855349424988915e-05,
      "loss": 0.9532,
      "step": 88
    },
    {
      "epoch": 0.18895966029723993,
      "grad_norm": 0.7642728344785427,
      "learning_rate": 2.8507991759320545e-05,
      "loss": 0.931,
      "step": 89
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 0.7579534251967283,
      "learning_rate": 2.8461822041619312e-05,
      "loss": 1.0023,
      "step": 90
    },
    {
      "epoch": 0.1932059447983015,
      "grad_norm": 0.700517970884762,
      "learning_rate": 2.841498737733824e-05,
      "loss": 0.9251,
      "step": 91
    },
    {
      "epoch": 0.19532908704883228,
      "grad_norm": 0.7550458532390354,
      "learning_rate": 2.8367490079875154e-05,
      "loss": 0.9632,
      "step": 92
    },
    {
      "epoch": 0.19745222929936307,
      "grad_norm": 0.7561266680050324,
      "learning_rate": 2.831933249535865e-05,
      "loss": 0.975,
      "step": 93
    },
    {
      "epoch": 0.19957537154989385,
      "grad_norm": 0.7554339394976815,
      "learning_rate": 2.827051700253217e-05,
      "loss": 0.9574,
      "step": 94
    },
    {
      "epoch": 0.20169851380042464,
      "grad_norm": 0.7519417055562898,
      "learning_rate": 2.8221046012636558e-05,
      "loss": 1.0089,
      "step": 95
    },
    {
      "epoch": 0.20382165605095542,
      "grad_norm": 0.695300221944843,
      "learning_rate": 2.817092196929091e-05,
      "loss": 0.9586,
      "step": 96
    },
    {
      "epoch": 0.2059447983014862,
      "grad_norm": 0.7004398610006882,
      "learning_rate": 2.8120147348371912e-05,
      "loss": 0.9575,
      "step": 97
    },
    {
      "epoch": 0.208067940552017,
      "grad_norm": 0.7161258126899045,
      "learning_rate": 2.8068724657891507e-05,
      "loss": 0.9429,
      "step": 98
    },
    {
      "epoch": 0.21019108280254778,
      "grad_norm": 0.7498697224476674,
      "learning_rate": 2.801665643787303e-05,
      "loss": 1.0132,
      "step": 99
    },
    {
      "epoch": 0.21231422505307856,
      "grad_norm": 0.6820422896195972,
      "learning_rate": 2.7963945260225748e-05,
      "loss": 1.0142,
      "step": 100
    },
    {
      "epoch": 0.21443736730360935,
      "grad_norm": 0.6998151084514577,
      "learning_rate": 2.7910593728617813e-05,
      "loss": 1.0474,
      "step": 101
    },
    {
      "epoch": 0.21656050955414013,
      "grad_norm": 0.6659748871284866,
      "learning_rate": 2.7856604478347655e-05,
      "loss": 0.964,
      "step": 102
    },
    {
      "epoch": 0.21868365180467092,
      "grad_norm": 0.6639674612741819,
      "learning_rate": 2.7801980176213798e-05,
      "loss": 0.9141,
      "step": 103
    },
    {
      "epoch": 0.2208067940552017,
      "grad_norm": 0.7962940595848361,
      "learning_rate": 2.7746723520383174e-05,
      "loss": 0.98,
      "step": 104
    },
    {
      "epoch": 0.2229299363057325,
      "grad_norm": 0.6748458306743221,
      "learning_rate": 2.76908372402578e-05,
      "loss": 0.9387,
      "step": 105
    },
    {
      "epoch": 0.22505307855626328,
      "grad_norm": 0.6591397874457393,
      "learning_rate": 2.763432409633998e-05,
      "loss": 0.9401,
      "step": 106
    },
    {
      "epoch": 0.22717622080679406,
      "grad_norm": 0.6815163810802223,
      "learning_rate": 2.7577186880095966e-05,
      "loss": 0.9852,
      "step": 107
    },
    {
      "epoch": 0.22929936305732485,
      "grad_norm": 0.6786470466077741,
      "learning_rate": 2.7519428413818034e-05,
      "loss": 0.9549,
      "step": 108
    },
    {
      "epoch": 0.23142250530785563,
      "grad_norm": 0.722955020585509,
      "learning_rate": 2.7461051550485116e-05,
      "loss": 1.0021,
      "step": 109
    },
    {
      "epoch": 0.23354564755838642,
      "grad_norm": 0.7518724654809783,
      "learning_rate": 2.740205917362186e-05,
      "loss": 1.0168,
      "step": 110
    },
    {
      "epoch": 0.2356687898089172,
      "grad_norm": 0.5989515308193117,
      "learning_rate": 2.7342454197156194e-05,
      "loss": 0.9608,
      "step": 111
    },
    {
      "epoch": 0.23779193205944799,
      "grad_norm": 0.63739062901645,
      "learning_rate": 2.728223956527539e-05,
      "loss": 0.9779,
      "step": 112
    },
    {
      "epoch": 0.23991507430997877,
      "grad_norm": 0.7107030178119403,
      "learning_rate": 2.722141825228066e-05,
      "loss": 0.9507,
      "step": 113
    },
    {
      "epoch": 0.24203821656050956,
      "grad_norm": 0.6662094575605222,
      "learning_rate": 2.7159993262440228e-05,
      "loss": 0.9566,
      "step": 114
    },
    {
      "epoch": 0.24416135881104034,
      "grad_norm": 0.684949594473451,
      "learning_rate": 2.7097967629840906e-05,
      "loss": 1.0072,
      "step": 115
    },
    {
      "epoch": 0.24628450106157113,
      "grad_norm": 0.717234854863913,
      "learning_rate": 2.703534441823827e-05,
      "loss": 0.9774,
      "step": 116
    },
    {
      "epoch": 0.2484076433121019,
      "grad_norm": 0.6600997556047009,
      "learning_rate": 2.6972126720905293e-05,
      "loss": 0.9804,
      "step": 117
    },
    {
      "epoch": 0.2505307855626327,
      "grad_norm": 0.6717502933643555,
      "learning_rate": 2.6908317660479583e-05,
      "loss": 0.9805,
      "step": 118
    },
    {
      "epoch": 0.2526539278131635,
      "grad_norm": 0.6668903744774365,
      "learning_rate": 2.68439203888091e-05,
      "loss": 1.012,
      "step": 119
    },
    {
      "epoch": 0.25477707006369427,
      "grad_norm": 0.7423058243771732,
      "learning_rate": 2.6778938086796512e-05,
      "loss": 1.0772,
      "step": 120
    },
    {
      "epoch": 0.25690021231422505,
      "grad_norm": 0.7186526888110079,
      "learning_rate": 2.6713373964242043e-05,
      "loss": 0.9614,
      "step": 121
    },
    {
      "epoch": 0.25902335456475584,
      "grad_norm": 0.7020168430150423,
      "learning_rate": 2.6647231259684946e-05,
      "loss": 0.9639,
      "step": 122
    },
    {
      "epoch": 0.2611464968152866,
      "grad_norm": 0.6506699216795314,
      "learning_rate": 2.6580513240243524e-05,
      "loss": 1.0157,
      "step": 123
    },
    {
      "epoch": 0.2632696390658174,
      "grad_norm": 0.6979496800781052,
      "learning_rate": 2.651322320145375e-05,
      "loss": 0.9827,
      "step": 124
    },
    {
      "epoch": 0.2653927813163482,
      "grad_norm": 0.7041593201924282,
      "learning_rate": 2.6445364467106494e-05,
      "loss": 0.9416,
      "step": 125
    },
    {
      "epoch": 0.267515923566879,
      "grad_norm": 0.7474469108757689,
      "learning_rate": 2.637694038908333e-05,
      "loss": 1.0345,
      "step": 126
    },
    {
      "epoch": 0.26963906581740976,
      "grad_norm": 0.6614249491962885,
      "learning_rate": 2.630795434719099e-05,
      "loss": 0.9453,
      "step": 127
    },
    {
      "epoch": 0.27176220806794055,
      "grad_norm": 0.6657326589669448,
      "learning_rate": 2.623840974899439e-05,
      "loss": 0.889,
      "step": 128
    },
    {
      "epoch": 0.27388535031847133,
      "grad_norm": 0.8022831021638541,
      "learning_rate": 2.616831002964834e-05,
      "loss": 0.9739,
      "step": 129
    },
    {
      "epoch": 0.2760084925690021,
      "grad_norm": 0.7179257591811719,
      "learning_rate": 2.609765865172786e-05,
      "loss": 0.9843,
      "step": 130
    },
    {
      "epoch": 0.2781316348195329,
      "grad_norm": 0.605816781626969,
      "learning_rate": 2.6026459105057127e-05,
      "loss": 0.96,
      "step": 131
    },
    {
      "epoch": 0.2802547770700637,
      "grad_norm": 0.6689266661628013,
      "learning_rate": 2.5954714906537116e-05,
      "loss": 0.9829,
      "step": 132
    },
    {
      "epoch": 0.2823779193205945,
      "grad_norm": 0.6414905131754337,
      "learning_rate": 2.5882429599971872e-05,
      "loss": 0.9315,
      "step": 133
    },
    {
      "epoch": 0.28450106157112526,
      "grad_norm": 0.6383947258042313,
      "learning_rate": 2.580960675589347e-05,
      "loss": 1.0047,
      "step": 134
    },
    {
      "epoch": 0.28662420382165604,
      "grad_norm": 0.6843220347965844,
      "learning_rate": 2.573624997138565e-05,
      "loss": 1.0134,
      "step": 135
    },
    {
      "epoch": 0.28874734607218683,
      "grad_norm": 0.6783409077200856,
      "learning_rate": 2.5662362869906123e-05,
      "loss": 0.9991,
      "step": 136
    },
    {
      "epoch": 0.2908704883227176,
      "grad_norm": 0.6082410802648054,
      "learning_rate": 2.558794910110761e-05,
      "loss": 0.9365,
      "step": 137
    },
    {
      "epoch": 0.2929936305732484,
      "grad_norm": 0.6110004204749758,
      "learning_rate": 2.5513012340657557e-05,
      "loss": 0.9458,
      "step": 138
    },
    {
      "epoch": 0.2951167728237792,
      "grad_norm": 0.6537047244906001,
      "learning_rate": 2.5437556290056575e-05,
      "loss": 0.9348,
      "step": 139
    },
    {
      "epoch": 0.29723991507430997,
      "grad_norm": 0.6237035022111638,
      "learning_rate": 2.5361584676455627e-05,
      "loss": 0.9963,
      "step": 140
    },
    {
      "epoch": 0.29936305732484075,
      "grad_norm": 0.658465414967936,
      "learning_rate": 2.5285101252471874e-05,
      "loss": 0.9365,
      "step": 141
    },
    {
      "epoch": 0.30148619957537154,
      "grad_norm": 0.6275241420274199,
      "learning_rate": 2.5208109796003364e-05,
      "loss": 1.0091,
      "step": 142
    },
    {
      "epoch": 0.3036093418259023,
      "grad_norm": 0.6338044548670078,
      "learning_rate": 2.5130614110042415e-05,
      "loss": 1.0313,
      "step": 143
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 0.6708789723264734,
      "learning_rate": 2.5052618022487733e-05,
      "loss": 0.9077,
      "step": 144
    },
    {
      "epoch": 0.3078556263269639,
      "grad_norm": 0.6673243086867044,
      "learning_rate": 2.4974125385955374e-05,
      "loss": 0.9733,
      "step": 145
    },
    {
      "epoch": 0.3099787685774947,
      "grad_norm": 0.6708310719412189,
      "learning_rate": 2.4895140077588412e-05,
      "loss": 0.9224,
      "step": 146
    },
    {
      "epoch": 0.31210191082802546,
      "grad_norm": 0.744990991437575,
      "learning_rate": 2.481566599886546e-05,
      "loss": 0.9491,
      "step": 147
    },
    {
      "epoch": 0.31422505307855625,
      "grad_norm": 0.6620719317323561,
      "learning_rate": 2.473570707540793e-05,
      "loss": 1.0361,
      "step": 148
    },
    {
      "epoch": 0.31634819532908703,
      "grad_norm": 0.6265774077983216,
      "learning_rate": 2.4655267256786126e-05,
      "loss": 1.0269,
      "step": 149
    },
    {
      "epoch": 0.3184713375796178,
      "grad_norm": 0.6861510574862938,
      "learning_rate": 2.4574350516324176e-05,
      "loss": 0.9546,
      "step": 150
    },
    {
      "epoch": 0.3205944798301486,
      "grad_norm": 0.6074369745423525,
      "learning_rate": 2.4492960850903757e-05,
      "loss": 0.9913,
      "step": 151
    },
    {
      "epoch": 0.3227176220806794,
      "grad_norm": 0.6170833154929086,
      "learning_rate": 2.4411102280766658e-05,
      "loss": 0.9914,
      "step": 152
    },
    {
      "epoch": 0.3248407643312102,
      "grad_norm": 0.6453490886193386,
      "learning_rate": 2.432877884931623e-05,
      "loss": 1.0185,
      "step": 153
    },
    {
      "epoch": 0.32696390658174096,
      "grad_norm": 0.6376312745074503,
      "learning_rate": 2.4245994622917636e-05,
      "loss": 1.014,
      "step": 154
    },
    {
      "epoch": 0.32908704883227174,
      "grad_norm": 0.5742876859098425,
      "learning_rate": 2.4162753690696998e-05,
      "loss": 1.0054,
      "step": 155
    },
    {
      "epoch": 0.33121019108280253,
      "grad_norm": 0.6636167519331397,
      "learning_rate": 2.4079060164339427e-05,
      "loss": 0.988,
      "step": 156
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.6393378453153665,
      "learning_rate": 2.3994918177885906e-05,
      "loss": 1.0362,
      "step": 157
    },
    {
      "epoch": 0.3354564755838641,
      "grad_norm": 0.5723846211227461,
      "learning_rate": 2.391033188752911e-05,
      "loss": 0.9425,
      "step": 158
    },
    {
      "epoch": 0.3375796178343949,
      "grad_norm": 0.6701423527493513,
      "learning_rate": 2.38253054714081e-05,
      "loss": 1.0224,
      "step": 159
    },
    {
      "epoch": 0.33970276008492567,
      "grad_norm": 0.6385278318499282,
      "learning_rate": 2.3739843129401932e-05,
      "loss": 0.9562,
      "step": 160
    },
    {
      "epoch": 0.34182590233545646,
      "grad_norm": 0.6587017894882612,
      "learning_rate": 2.365394908292224e-05,
      "loss": 0.9237,
      "step": 161
    },
    {
      "epoch": 0.34394904458598724,
      "grad_norm": 0.6205619011806367,
      "learning_rate": 2.356762757470468e-05,
      "loss": 0.9645,
      "step": 162
    },
    {
      "epoch": 0.346072186836518,
      "grad_norm": 0.6388406844797557,
      "learning_rate": 2.3480882868599383e-05,
      "loss": 0.9731,
      "step": 163
    },
    {
      "epoch": 0.3481953290870488,
      "grad_norm": 0.6390704004642666,
      "learning_rate": 2.3393719249360335e-05,
      "loss": 1.0109,
      "step": 164
    },
    {
      "epoch": 0.3503184713375796,
      "grad_norm": 0.6134473572194278,
      "learning_rate": 2.3306141022433728e-05,
      "loss": 1.0178,
      "step": 165
    },
    {
      "epoch": 0.3524416135881104,
      "grad_norm": 0.6031953979375974,
      "learning_rate": 2.3218152513745306e-05,
      "loss": 0.9301,
      "step": 166
    },
    {
      "epoch": 0.35456475583864117,
      "grad_norm": 0.6766275451516183,
      "learning_rate": 2.3129758069486665e-05,
      "loss": 0.9615,
      "step": 167
    },
    {
      "epoch": 0.35668789808917195,
      "grad_norm": 0.5864063100045787,
      "learning_rate": 2.3040962055900598e-05,
      "loss": 1.0087,
      "step": 168
    },
    {
      "epoch": 0.35881104033970274,
      "grad_norm": 0.6410164877808251,
      "learning_rate": 2.2951768859065405e-05,
      "loss": 0.9828,
      "step": 169
    },
    {
      "epoch": 0.3609341825902335,
      "grad_norm": 0.6182510793860089,
      "learning_rate": 2.2862182884678243e-05,
      "loss": 0.914,
      "step": 170
    },
    {
      "epoch": 0.3630573248407643,
      "grad_norm": 0.6109084937070998,
      "learning_rate": 2.2772208557837526e-05,
      "loss": 0.9526,
      "step": 171
    },
    {
      "epoch": 0.3651804670912951,
      "grad_norm": 0.6117550209801461,
      "learning_rate": 2.2681850322824324e-05,
      "loss": 0.9419,
      "step": 172
    },
    {
      "epoch": 0.3673036093418259,
      "grad_norm": 0.6057950347730594,
      "learning_rate": 2.2591112642882853e-05,
      "loss": 0.9378,
      "step": 173
    },
    {
      "epoch": 0.36942675159235666,
      "grad_norm": 0.6090436491314833,
      "learning_rate": 2.25e-05,
      "loss": 0.9761,
      "step": 174
    },
    {
      "epoch": 0.37154989384288745,
      "grad_norm": 0.6192774184724117,
      "learning_rate": 2.2408516894683952e-05,
      "loss": 0.9504,
      "step": 175
    },
    {
      "epoch": 0.37367303609341823,
      "grad_norm": 0.6513174814734923,
      "learning_rate": 2.2316667845741885e-05,
      "loss": 0.9883,
      "step": 176
    },
    {
      "epoch": 0.37579617834394907,
      "grad_norm": 0.6560644930725986,
      "learning_rate": 2.2224457390056747e-05,
      "loss": 0.9457,
      "step": 177
    },
    {
      "epoch": 0.37791932059447986,
      "grad_norm": 0.6116671564494237,
      "learning_rate": 2.2131890082363176e-05,
      "loss": 0.9849,
      "step": 178
    },
    {
      "epoch": 0.38004246284501064,
      "grad_norm": 0.6596643709604704,
      "learning_rate": 2.2038970495022515e-05,
      "loss": 0.9239,
      "step": 179
    },
    {
      "epoch": 0.3821656050955414,
      "grad_norm": 0.6902003864146642,
      "learning_rate": 2.194570321779695e-05,
      "loss": 1.0142,
      "step": 180
    },
    {
      "epoch": 0.3842887473460722,
      "grad_norm": 0.6472331908547782,
      "learning_rate": 2.185209285762281e-05,
      "loss": 0.9807,
      "step": 181
    },
    {
      "epoch": 0.386411889596603,
      "grad_norm": 0.5879920270887264,
      "learning_rate": 2.1758144038383e-05,
      "loss": 0.9983,
      "step": 182
    },
    {
      "epoch": 0.3885350318471338,
      "grad_norm": 0.647220005434279,
      "learning_rate": 2.166386140067861e-05,
      "loss": 0.9361,
      "step": 183
    },
    {
      "epoch": 0.39065817409766457,
      "grad_norm": 0.6422591531038985,
      "learning_rate": 2.1569249601599697e-05,
      "loss": 0.9166,
      "step": 184
    },
    {
      "epoch": 0.39278131634819535,
      "grad_norm": 0.6068003709339048,
      "learning_rate": 2.147431331449522e-05,
      "loss": 0.987,
      "step": 185
    },
    {
      "epoch": 0.39490445859872614,
      "grad_norm": 0.7073279288261982,
      "learning_rate": 2.1379057228742246e-05,
      "loss": 1.0215,
      "step": 186
    },
    {
      "epoch": 0.3970276008492569,
      "grad_norm": 0.6123796585365825,
      "learning_rate": 2.128348604951428e-05,
      "loss": 1.0122,
      "step": 187
    },
    {
      "epoch": 0.3991507430997877,
      "grad_norm": 0.5646703919435287,
      "learning_rate": 2.1187604497548862e-05,
      "loss": 1.0163,
      "step": 188
    },
    {
      "epoch": 0.4012738853503185,
      "grad_norm": 0.707419026365204,
      "learning_rate": 2.1091417308914406e-05,
      "loss": 0.9333,
      "step": 189
    },
    {
      "epoch": 0.4033970276008493,
      "grad_norm": 0.55876064307062,
      "learning_rate": 2.0994929234776232e-05,
      "loss": 0.8764,
      "step": 190
    },
    {
      "epoch": 0.40552016985138006,
      "grad_norm": 0.6189008896390565,
      "learning_rate": 2.089814504116191e-05,
      "loss": 0.9441,
      "step": 191
    },
    {
      "epoch": 0.40764331210191085,
      "grad_norm": 0.6487599142290061,
      "learning_rate": 2.0801069508725815e-05,
      "loss": 0.9467,
      "step": 192
    },
    {
      "epoch": 0.40976645435244163,
      "grad_norm": 0.5800499138520817,
      "learning_rate": 2.0703707432513006e-05,
      "loss": 0.9793,
      "step": 193
    },
    {
      "epoch": 0.4118895966029724,
      "grad_norm": 0.6260444366432476,
      "learning_rate": 2.0606063621722374e-05,
      "loss": 0.9631,
      "step": 194
    },
    {
      "epoch": 0.4140127388535032,
      "grad_norm": 0.6434863222057013,
      "learning_rate": 2.050814289946908e-05,
      "loss": 0.9217,
      "step": 195
    },
    {
      "epoch": 0.416135881104034,
      "grad_norm": 0.6279442230026151,
      "learning_rate": 2.0409950102546334e-05,
      "loss": 0.9156,
      "step": 196
    },
    {
      "epoch": 0.4182590233545648,
      "grad_norm": 0.6004879407537032,
      "learning_rate": 2.0311490081186458e-05,
      "loss": 0.9706,
      "step": 197
    },
    {
      "epoch": 0.42038216560509556,
      "grad_norm": 0.6527914761227536,
      "learning_rate": 2.021276769882133e-05,
      "loss": 1.0022,
      "step": 198
    },
    {
      "epoch": 0.42250530785562634,
      "grad_norm": 0.6666071133368344,
      "learning_rate": 2.0113787831842155e-05,
      "loss": 0.938,
      "step": 199
    },
    {
      "epoch": 0.42462845010615713,
      "grad_norm": 0.613300047105262,
      "learning_rate": 2.0014555369358576e-05,
      "loss": 0.9555,
      "step": 200
    },
    {
      "epoch": 0.4267515923566879,
      "grad_norm": 0.6329025879345046,
      "learning_rate": 1.9915075212957192e-05,
      "loss": 0.974,
      "step": 201
    },
    {
      "epoch": 0.4288747346072187,
      "grad_norm": 0.6460699216963086,
      "learning_rate": 1.9815352276459447e-05,
      "loss": 0.9687,
      "step": 202
    },
    {
      "epoch": 0.4309978768577495,
      "grad_norm": 0.5693282535126375,
      "learning_rate": 1.9715391485678893e-05,
      "loss": 0.9225,
      "step": 203
    },
    {
      "epoch": 0.43312101910828027,
      "grad_norm": 0.6028040675891717,
      "learning_rate": 1.961519777817791e-05,
      "loss": 0.9561,
      "step": 204
    },
    {
      "epoch": 0.43524416135881105,
      "grad_norm": 0.5964782950749927,
      "learning_rate": 1.9514776103023783e-05,
      "loss": 1.0385,
      "step": 205
    },
    {
      "epoch": 0.43736730360934184,
      "grad_norm": 0.6258614863873178,
      "learning_rate": 1.9414131420544258e-05,
      "loss": 0.9441,
      "step": 206
    },
    {
      "epoch": 0.4394904458598726,
      "grad_norm": 0.5843981439628442,
      "learning_rate": 1.9313268702082536e-05,
      "loss": 0.9485,
      "step": 207
    },
    {
      "epoch": 0.4416135881104034,
      "grad_norm": 0.6272278606324312,
      "learning_rate": 1.921219292975169e-05,
      "loss": 0.911,
      "step": 208
    },
    {
      "epoch": 0.4437367303609342,
      "grad_norm": 0.571371168999436,
      "learning_rate": 1.9110909096188606e-05,
      "loss": 0.9161,
      "step": 209
    },
    {
      "epoch": 0.445859872611465,
      "grad_norm": 0.6219809311888109,
      "learning_rate": 1.900942220430733e-05,
      "loss": 0.9759,
      "step": 210
    },
    {
      "epoch": 0.44798301486199577,
      "grad_norm": 0.5924793719813729,
      "learning_rate": 1.890773726705198e-05,
      "loss": 0.9515,
      "step": 211
    },
    {
      "epoch": 0.45010615711252655,
      "grad_norm": 0.6083549766898838,
      "learning_rate": 1.880585930714914e-05,
      "loss": 0.9277,
      "step": 212
    },
    {
      "epoch": 0.45222929936305734,
      "grad_norm": 0.6382145126359631,
      "learning_rate": 1.8703793356859717e-05,
      "loss": 0.9268,
      "step": 213
    },
    {
      "epoch": 0.4543524416135881,
      "grad_norm": 0.5977568426364863,
      "learning_rate": 1.8601544457730426e-05,
      "loss": 0.9081,
      "step": 214
    },
    {
      "epoch": 0.4564755838641189,
      "grad_norm": 0.5970654264593069,
      "learning_rate": 1.849911766034472e-05,
      "loss": 0.9194,
      "step": 215
    },
    {
      "epoch": 0.4585987261146497,
      "grad_norm": 0.6596541084844906,
      "learning_rate": 1.8396518024073356e-05,
      "loss": 0.9674,
      "step": 216
    },
    {
      "epoch": 0.4607218683651805,
      "grad_norm": 0.6165089515789794,
      "learning_rate": 1.8293750616824443e-05,
      "loss": 1.0102,
      "step": 217
    },
    {
      "epoch": 0.46284501061571126,
      "grad_norm": 0.5710130818893961,
      "learning_rate": 1.819082051479315e-05,
      "loss": 0.9414,
      "step": 218
    },
    {
      "epoch": 0.46496815286624205,
      "grad_norm": 0.5709493042606094,
      "learning_rate": 1.8087732802210956e-05,
      "loss": 0.9246,
      "step": 219
    },
    {
      "epoch": 0.46709129511677283,
      "grad_norm": 0.5713801994508075,
      "learning_rate": 1.7984492571094497e-05,
      "loss": 0.9442,
      "step": 220
    },
    {
      "epoch": 0.4692144373673036,
      "grad_norm": 0.6212813583103608,
      "learning_rate": 1.788110492099407e-05,
      "loss": 1.0118,
      "step": 221
    },
    {
      "epoch": 0.4713375796178344,
      "grad_norm": 0.5542674209590273,
      "learning_rate": 1.777757495874174e-05,
      "loss": 0.8859,
      "step": 222
    },
    {
      "epoch": 0.4734607218683652,
      "grad_norm": 0.6231448367618974,
      "learning_rate": 1.7673907798199054e-05,
      "loss": 0.9212,
      "step": 223
    },
    {
      "epoch": 0.47558386411889597,
      "grad_norm": 0.5788006380629036,
      "learning_rate": 1.75701085600045e-05,
      "loss": 0.9956,
      "step": 224
    },
    {
      "epoch": 0.47770700636942676,
      "grad_norm": 0.667025102007472,
      "learning_rate": 1.7466182371320518e-05,
      "loss": 0.9258,
      "step": 225
    },
    {
      "epoch": 0.47983014861995754,
      "grad_norm": 0.5789996963104334,
      "learning_rate": 1.7362134365580268e-05,
      "loss": 0.922,
      "step": 226
    },
    {
      "epoch": 0.4819532908704883,
      "grad_norm": 0.5380156350473858,
      "learning_rate": 1.7257969682234073e-05,
      "loss": 0.9583,
      "step": 227
    },
    {
      "epoch": 0.4840764331210191,
      "grad_norm": 0.5920365256251041,
      "learning_rate": 1.7153693466495538e-05,
      "loss": 0.9588,
      "step": 228
    },
    {
      "epoch": 0.4861995753715499,
      "grad_norm": 0.6254871880111138,
      "learning_rate": 1.7049310869087402e-05,
      "loss": 0.9793,
      "step": 229
    },
    {
      "epoch": 0.4883227176220807,
      "grad_norm": 0.5740149209669835,
      "learning_rate": 1.6944827045987148e-05,
      "loss": 0.9483,
      "step": 230
    },
    {
      "epoch": 0.49044585987261147,
      "grad_norm": 0.6132558502364226,
      "learning_rate": 1.6840247158172277e-05,
      "loss": 0.9082,
      "step": 231
    },
    {
      "epoch": 0.49256900212314225,
      "grad_norm": 0.6259226448543737,
      "learning_rate": 1.673557637136542e-05,
      "loss": 1.0396,
      "step": 232
    },
    {
      "epoch": 0.49469214437367304,
      "grad_norm": 1.0920922511810338,
      "learning_rate": 1.663081985577916e-05,
      "loss": 0.9112,
      "step": 233
    },
    {
      "epoch": 0.4968152866242038,
      "grad_norm": 0.5773074384867334,
      "learning_rate": 1.652598278586065e-05,
      "loss": 0.8965,
      "step": 234
    },
    {
      "epoch": 0.4989384288747346,
      "grad_norm": 0.6309124283703914,
      "learning_rate": 1.6421070340036026e-05,
      "loss": 0.9624,
      "step": 235
    },
    {
      "epoch": 0.5010615711252654,
      "grad_norm": 0.5941173912410761,
      "learning_rate": 1.631608770045461e-05,
      "loss": 0.8636,
      "step": 236
    },
    {
      "epoch": 0.5031847133757962,
      "grad_norm": 0.6981644261413696,
      "learning_rate": 1.6211040052732958e-05,
      "loss": 0.9533,
      "step": 237
    },
    {
      "epoch": 0.505307855626327,
      "grad_norm": 0.5925395527066952,
      "learning_rate": 1.6105932585698687e-05,
      "loss": 0.9811,
      "step": 238
    },
    {
      "epoch": 0.5074309978768577,
      "grad_norm": 0.5889941819588222,
      "learning_rate": 1.6000770491134195e-05,
      "loss": 0.9276,
      "step": 239
    },
    {
      "epoch": 0.5095541401273885,
      "grad_norm": 0.6054802307465149,
      "learning_rate": 1.5895558963520207e-05,
      "loss": 1.0013,
      "step": 240
    },
    {
      "epoch": 0.5116772823779193,
      "grad_norm": 0.5854921389762675,
      "learning_rate": 1.5790303199779194e-05,
      "loss": 0.938,
      "step": 241
    },
    {
      "epoch": 0.5138004246284501,
      "grad_norm": 0.6156629079015952,
      "learning_rate": 1.568500839901867e-05,
      "loss": 0.8998,
      "step": 242
    },
    {
      "epoch": 0.5159235668789809,
      "grad_norm": 0.5942832812673114,
      "learning_rate": 1.5579679762274377e-05,
      "loss": 0.906,
      "step": 243
    },
    {
      "epoch": 0.5180467091295117,
      "grad_norm": 0.6245210048609385,
      "learning_rate": 1.5474322492253382e-05,
      "loss": 0.9881,
      "step": 244
    },
    {
      "epoch": 0.5201698513800425,
      "grad_norm": 0.5813078674079436,
      "learning_rate": 1.5368941793077115e-05,
      "loss": 0.9948,
      "step": 245
    },
    {
      "epoch": 0.5222929936305732,
      "grad_norm": 0.5729126067790324,
      "learning_rate": 1.5263542870024263e-05,
      "loss": 0.9782,
      "step": 246
    },
    {
      "epoch": 0.524416135881104,
      "grad_norm": 0.6115976256801431,
      "learning_rate": 1.5158130929273695e-05,
      "loss": 0.9586,
      "step": 247
    },
    {
      "epoch": 0.5265392781316348,
      "grad_norm": 0.5677823931669936,
      "learning_rate": 1.505271117764728e-05,
      "loss": 0.9273,
      "step": 248
    },
    {
      "epoch": 0.5286624203821656,
      "grad_norm": 0.6337456306602008,
      "learning_rate": 1.494728882235272e-05,
      "loss": 0.9653,
      "step": 249
    },
    {
      "epoch": 0.5307855626326964,
      "grad_norm": 0.5908641833311927,
      "learning_rate": 1.4841869070726307e-05,
      "loss": 0.9783,
      "step": 250
    },
    {
      "epoch": 0.5329087048832272,
      "grad_norm": 0.6315479071635102,
      "learning_rate": 1.473645712997574e-05,
      "loss": 0.9341,
      "step": 251
    },
    {
      "epoch": 0.535031847133758,
      "grad_norm": 0.6461862581364697,
      "learning_rate": 1.4631058206922886e-05,
      "loss": 0.9351,
      "step": 252
    },
    {
      "epoch": 0.5371549893842887,
      "grad_norm": 0.6114227562639025,
      "learning_rate": 1.4525677507746615e-05,
      "loss": 0.9614,
      "step": 253
    },
    {
      "epoch": 0.5392781316348195,
      "grad_norm": 0.5821157750989183,
      "learning_rate": 1.4420320237725632e-05,
      "loss": 1.0022,
      "step": 254
    },
    {
      "epoch": 0.5414012738853503,
      "grad_norm": 0.6176480452727935,
      "learning_rate": 1.4314991600981336e-05,
      "loss": 0.9242,
      "step": 255
    },
    {
      "epoch": 0.5435244161358811,
      "grad_norm": 0.591935718739984,
      "learning_rate": 1.4209696800220807e-05,
      "loss": 0.9189,
      "step": 256
    },
    {
      "epoch": 0.5456475583864119,
      "grad_norm": 0.6355341920694051,
      "learning_rate": 1.4104441036479797e-05,
      "loss": 0.9276,
      "step": 257
    },
    {
      "epoch": 0.5477707006369427,
      "grad_norm": 0.6077185944726149,
      "learning_rate": 1.399922950886581e-05,
      "loss": 0.9697,
      "step": 258
    },
    {
      "epoch": 0.5498938428874734,
      "grad_norm": 0.5590992275595242,
      "learning_rate": 1.3894067414301315e-05,
      "loss": 0.9377,
      "step": 259
    },
    {
      "epoch": 0.5520169851380042,
      "grad_norm": 0.5998950581637562,
      "learning_rate": 1.3788959947267042e-05,
      "loss": 0.909,
      "step": 260
    },
    {
      "epoch": 0.554140127388535,
      "grad_norm": 0.6965397105135438,
      "learning_rate": 1.3683912299545391e-05,
      "loss": 0.9523,
      "step": 261
    },
    {
      "epoch": 0.5562632696390658,
      "grad_norm": 0.635211817521203,
      "learning_rate": 1.3578929659963977e-05,
      "loss": 0.9972,
      "step": 262
    },
    {
      "epoch": 0.5583864118895966,
      "grad_norm": 0.5385969763281576,
      "learning_rate": 1.347401721413935e-05,
      "loss": 0.8788,
      "step": 263
    },
    {
      "epoch": 0.5605095541401274,
      "grad_norm": 0.5747284767597718,
      "learning_rate": 1.336918014422084e-05,
      "loss": 0.9345,
      "step": 264
    },
    {
      "epoch": 0.5626326963906582,
      "grad_norm": 0.6397363727759597,
      "learning_rate": 1.3264423628634583e-05,
      "loss": 0.9852,
      "step": 265
    },
    {
      "epoch": 0.564755838641189,
      "grad_norm": 0.6401143379139339,
      "learning_rate": 1.3159752841827724e-05,
      "loss": 0.9803,
      "step": 266
    },
    {
      "epoch": 0.5668789808917197,
      "grad_norm": 0.5704645508381336,
      "learning_rate": 1.3055172954012856e-05,
      "loss": 0.9394,
      "step": 267
    },
    {
      "epoch": 0.5690021231422505,
      "grad_norm": 0.5783312592479926,
      "learning_rate": 1.2950689130912599e-05,
      "loss": 1.0313,
      "step": 268
    },
    {
      "epoch": 0.5711252653927813,
      "grad_norm": 0.5472325263398086,
      "learning_rate": 1.2846306533504465e-05,
      "loss": 0.9307,
      "step": 269
    },
    {
      "epoch": 0.5732484076433121,
      "grad_norm": 0.5949754852554261,
      "learning_rate": 1.2742030317765933e-05,
      "loss": 0.9463,
      "step": 270
    },
    {
      "epoch": 0.5753715498938429,
      "grad_norm": 0.5642237939718832,
      "learning_rate": 1.2637865634419735e-05,
      "loss": 0.9339,
      "step": 271
    },
    {
      "epoch": 0.5774946921443737,
      "grad_norm": 0.5950396421063334,
      "learning_rate": 1.2533817628679485e-05,
      "loss": 0.9871,
      "step": 272
    },
    {
      "epoch": 0.5796178343949044,
      "grad_norm": 0.555308435607654,
      "learning_rate": 1.24298914399955e-05,
      "loss": 0.9088,
      "step": 273
    },
    {
      "epoch": 0.5817409766454352,
      "grad_norm": 0.5543717603232199,
      "learning_rate": 1.2326092201800948e-05,
      "loss": 0.9858,
      "step": 274
    },
    {
      "epoch": 0.583864118895966,
      "grad_norm": 0.5768748399190285,
      "learning_rate": 1.2222425041258267e-05,
      "loss": 0.9933,
      "step": 275
    },
    {
      "epoch": 0.5859872611464968,
      "grad_norm": 0.6144146862154265,
      "learning_rate": 1.2118895079005929e-05,
      "loss": 0.966,
      "step": 276
    },
    {
      "epoch": 0.5881104033970276,
      "grad_norm": 0.6371726541609838,
      "learning_rate": 1.2015507428905509e-05,
      "loss": 0.9366,
      "step": 277
    },
    {
      "epoch": 0.5902335456475584,
      "grad_norm": 0.5757938188544394,
      "learning_rate": 1.1912267197789047e-05,
      "loss": 0.9364,
      "step": 278
    },
    {
      "epoch": 0.5923566878980892,
      "grad_norm": 0.6158043621368744,
      "learning_rate": 1.1809179485206847e-05,
      "loss": 0.9741,
      "step": 279
    },
    {
      "epoch": 0.5944798301486199,
      "grad_norm": 0.5414978480240109,
      "learning_rate": 1.1706249383175558e-05,
      "loss": 0.9869,
      "step": 280
    },
    {
      "epoch": 0.5966029723991507,
      "grad_norm": 0.6152469998780976,
      "learning_rate": 1.1603481975926643e-05,
      "loss": 0.9781,
      "step": 281
    },
    {
      "epoch": 0.5987261146496815,
      "grad_norm": 0.6082547306754248,
      "learning_rate": 1.1500882339655278e-05,
      "loss": 0.9069,
      "step": 282
    },
    {
      "epoch": 0.6008492569002123,
      "grad_norm": 0.6137321132683284,
      "learning_rate": 1.1398455542269578e-05,
      "loss": 0.9701,
      "step": 283
    },
    {
      "epoch": 0.6029723991507431,
      "grad_norm": 0.5745752002120236,
      "learning_rate": 1.1296206643140284e-05,
      "loss": 0.9598,
      "step": 284
    },
    {
      "epoch": 0.6050955414012739,
      "grad_norm": 0.7041392825748041,
      "learning_rate": 1.1194140692850863e-05,
      "loss": 0.979,
      "step": 285
    },
    {
      "epoch": 0.6072186836518046,
      "grad_norm": 0.5744290113136731,
      "learning_rate": 1.1092262732948017e-05,
      "loss": 0.9193,
      "step": 286
    },
    {
      "epoch": 0.6093418259023354,
      "grad_norm": 0.5425442729898244,
      "learning_rate": 1.0990577795692676e-05,
      "loss": 0.9141,
      "step": 287
    },
    {
      "epoch": 0.6114649681528662,
      "grad_norm": 0.5687470133452739,
      "learning_rate": 1.0889090903811397e-05,
      "loss": 0.958,
      "step": 288
    },
    {
      "epoch": 0.613588110403397,
      "grad_norm": 0.567846934811383,
      "learning_rate": 1.0787807070248306e-05,
      "loss": 0.8882,
      "step": 289
    },
    {
      "epoch": 0.6157112526539278,
      "grad_norm": 0.5646293628586256,
      "learning_rate": 1.068673129791747e-05,
      "loss": 0.9476,
      "step": 290
    },
    {
      "epoch": 0.6178343949044586,
      "grad_norm": 0.5568078722816252,
      "learning_rate": 1.0585868579455745e-05,
      "loss": 0.9549,
      "step": 291
    },
    {
      "epoch": 0.6199575371549894,
      "grad_norm": 0.5564148135548681,
      "learning_rate": 1.0485223896976221e-05,
      "loss": 0.9683,
      "step": 292
    },
    {
      "epoch": 0.6220806794055201,
      "grad_norm": 0.5779558456789065,
      "learning_rate": 1.0384802221822098e-05,
      "loss": 0.981,
      "step": 293
    },
    {
      "epoch": 0.6242038216560509,
      "grad_norm": 0.579183894606748,
      "learning_rate": 1.028460851432111e-05,
      "loss": 0.8776,
      "step": 294
    },
    {
      "epoch": 0.6263269639065817,
      "grad_norm": 0.5562390759302435,
      "learning_rate": 1.0184647723540557e-05,
      "loss": 0.9366,
      "step": 295
    },
    {
      "epoch": 0.6284501061571125,
      "grad_norm": 0.6039425393322856,
      "learning_rate": 1.0084924787042809e-05,
      "loss": 0.9764,
      "step": 296
    },
    {
      "epoch": 0.6305732484076433,
      "grad_norm": 0.5990295929838292,
      "learning_rate": 9.985444630641426e-06,
      "loss": 0.9575,
      "step": 297
    },
    {
      "epoch": 0.6326963906581741,
      "grad_norm": 0.5334970805071061,
      "learning_rate": 9.886212168157848e-06,
      "loss": 0.9255,
      "step": 298
    },
    {
      "epoch": 0.6348195329087049,
      "grad_norm": 0.5458651078507479,
      "learning_rate": 9.787232301178669e-06,
      "loss": 0.9658,
      "step": 299
    },
    {
      "epoch": 0.6369426751592356,
      "grad_norm": 0.570409644049972,
      "learning_rate": 9.688509918813547e-06,
      "loss": 0.9055,
      "step": 300
    },
    {
      "epoch": 0.6390658174097664,
      "grad_norm": 0.559661538879096,
      "learning_rate": 9.590049897453668e-06,
      "loss": 0.9829,
      "step": 301
    },
    {
      "epoch": 0.6411889596602972,
      "grad_norm": 0.5594976911463373,
      "learning_rate": 9.491857100530919e-06,
      "loss": 0.9484,
      "step": 302
    },
    {
      "epoch": 0.643312101910828,
      "grad_norm": 0.5272576598614247,
      "learning_rate": 9.393936378277632e-06,
      "loss": 0.9366,
      "step": 303
    },
    {
      "epoch": 0.6454352441613588,
      "grad_norm": 0.5932545624966695,
      "learning_rate": 9.296292567486997e-06,
      "loss": 0.9009,
      "step": 304
    },
    {
      "epoch": 0.6475583864118896,
      "grad_norm": 0.564218312285332,
      "learning_rate": 9.198930491274188e-06,
      "loss": 0.922,
      "step": 305
    },
    {
      "epoch": 0.6496815286624203,
      "grad_norm": 0.556568715801767,
      "learning_rate": 9.10185495883809e-06,
      "loss": 0.9486,
      "step": 306
    },
    {
      "epoch": 0.6518046709129511,
      "grad_norm": 0.545963447620699,
      "learning_rate": 9.005070765223768e-06,
      "loss": 0.9501,
      "step": 307
    },
    {
      "epoch": 0.6539278131634819,
      "grad_norm": 0.5361391538001072,
      "learning_rate": 8.908582691085593e-06,
      "loss": 0.9539,
      "step": 308
    },
    {
      "epoch": 0.6560509554140127,
      "grad_norm": 0.5993729956874576,
      "learning_rate": 8.812395502451139e-06,
      "loss": 1.0198,
      "step": 309
    },
    {
      "epoch": 0.6581740976645435,
      "grad_norm": 0.5804015733760072,
      "learning_rate": 8.716513950485725e-06,
      "loss": 0.9764,
      "step": 310
    },
    {
      "epoch": 0.6602972399150743,
      "grad_norm": 0.6255942117075288,
      "learning_rate": 8.620942771257755e-06,
      "loss": 0.926,
      "step": 311
    },
    {
      "epoch": 0.6624203821656051,
      "grad_norm": 0.5661900360842226,
      "learning_rate": 8.525686685504781e-06,
      "loss": 0.8951,
      "step": 312
    },
    {
      "epoch": 0.6645435244161358,
      "grad_norm": 0.5897078527410758,
      "learning_rate": 8.430750398400309e-06,
      "loss": 0.9424,
      "step": 313
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.5099303542607264,
      "learning_rate": 8.336138599321391e-06,
      "loss": 0.9596,
      "step": 314
    }
  ],
  "logging_steps": 1,
  "max_steps": 471,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 157,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 46655615139840.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}