{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3333333333333333,
  "eval_steps": 500,
  "global_step": 157,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0021231422505307855,
      "grad_norm": 6.233692311689662,
      "learning_rate": 0.0,
      "loss": 1.3677,
      "step": 1
    },
    {
      "epoch": 0.004246284501061571,
      "grad_norm": 6.0364996372518975,
      "learning_rate": 1.2499999999999999e-06,
      "loss": 1.4092,
      "step": 2
    },
    {
      "epoch": 0.006369426751592357,
      "grad_norm": 5.824376154111525,
      "learning_rate": 2.4999999999999998e-06,
      "loss": 1.3994,
      "step": 3
    },
    {
      "epoch": 0.008492569002123142,
      "grad_norm": 5.902606326788258,
      "learning_rate": 3.75e-06,
      "loss": 1.3973,
      "step": 4
    },
    {
      "epoch": 0.010615711252653927,
      "grad_norm": 5.275994860996141,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 1.3653,
      "step": 5
    },
    {
      "epoch": 0.012738853503184714,
      "grad_norm": 4.1623214153554295,
      "learning_rate": 6.25e-06,
      "loss": 1.3567,
      "step": 6
    },
    {
      "epoch": 0.014861995753715499,
      "grad_norm": 3.3671066127082088,
      "learning_rate": 7.5e-06,
      "loss": 1.2129,
      "step": 7
    },
    {
      "epoch": 0.016985138004246284,
      "grad_norm": 2.97534257184618,
      "learning_rate": 8.750000000000001e-06,
      "loss": 1.2794,
      "step": 8
    },
    {
      "epoch": 0.01910828025477707,
      "grad_norm": 2.9074617656297654,
      "learning_rate": 9.999999999999999e-06,
      "loss": 1.2212,
      "step": 9
    },
    {
      "epoch": 0.021231422505307854,
      "grad_norm": 3.829816734428447,
      "learning_rate": 1.125e-05,
      "loss": 1.2232,
      "step": 10
    },
    {
      "epoch": 0.02335456475583864,
      "grad_norm": 2.710794346051666,
      "learning_rate": 1.25e-05,
      "loss": 1.1805,
      "step": 11
    },
    {
      "epoch": 0.025477707006369428,
      "grad_norm": 2.490537571865269,
      "learning_rate": 1.375e-05,
      "loss": 1.1395,
      "step": 12
    },
    {
      "epoch": 0.027600849256900213,
      "grad_norm": 2.958343893823127,
      "learning_rate": 1.5e-05,
      "loss": 1.1996,
      "step": 13
    },
    {
      "epoch": 0.029723991507430998,
      "grad_norm": 2.10711525179354,
      "learning_rate": 1.625e-05,
      "loss": 1.1154,
      "step": 14
    },
    {
      "epoch": 0.03184713375796178,
      "grad_norm": 1.804171701556794,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 1.1335,
      "step": 15
    },
    {
      "epoch": 0.03397027600849257,
      "grad_norm": 1.3531726745218633,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.0982,
      "step": 16
    },
    {
      "epoch": 0.036093418259023353,
      "grad_norm": 1.2822932286416207,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 1.0626,
      "step": 17
    },
    {
      "epoch": 0.03821656050955414,
      "grad_norm": 1.2087880979605115,
      "learning_rate": 2.125e-05,
      "loss": 1.0372,
      "step": 18
    },
    {
      "epoch": 0.040339702760084924,
      "grad_norm": 1.2492451284743782,
      "learning_rate": 2.25e-05,
      "loss": 1.0343,
      "step": 19
    },
    {
      "epoch": 0.04246284501061571,
      "grad_norm": 1.10733509517002,
      "learning_rate": 2.3749999999999998e-05,
      "loss": 0.9442,
      "step": 20
    },
    {
      "epoch": 0.044585987261146494,
      "grad_norm": 1.0490035536866515,
      "learning_rate": 2.5e-05,
      "loss": 1.0626,
      "step": 21
    },
    {
      "epoch": 0.04670912951167728,
      "grad_norm": 0.9466743921266996,
      "learning_rate": 2.625e-05,
      "loss": 1.0386,
      "step": 22
    },
    {
      "epoch": 0.04883227176220807,
      "grad_norm": 0.9081034267931447,
      "learning_rate": 2.75e-05,
      "loss": 0.9487,
      "step": 23
    },
    {
      "epoch": 0.050955414012738856,
      "grad_norm": 1.0372847960112026,
      "learning_rate": 2.875e-05,
      "loss": 0.9728,
      "step": 24
    },
    {
      "epoch": 0.05307855626326964,
      "grad_norm": 1.198809452316821,
      "learning_rate": 3e-05,
      "loss": 1.0076,
      "step": 25
    },
    {
      "epoch": 0.055201698513800426,
      "grad_norm": 1.1303411718578629,
      "learning_rate": 2.9999629537566803e-05,
      "loss": 1.0052,
      "step": 26
    },
    {
      "epoch": 0.05732484076433121,
      "grad_norm": 0.9253246602948907,
      "learning_rate": 2.9998518168566207e-05,
      "loss": 1.0535,
      "step": 27
    },
    {
      "epoch": 0.059447983014861996,
      "grad_norm": 0.8916584736775167,
      "learning_rate": 2.999666594789427e-05,
      "loss": 1.0575,
      "step": 28
    },
    {
      "epoch": 0.06157112526539278,
      "grad_norm": 1.017365856913298,
      "learning_rate": 2.999407296704142e-05,
      "loss": 1.0276,
      "step": 29
    },
    {
      "epoch": 0.06369426751592357,
      "grad_norm": 0.9179032077950415,
      "learning_rate": 2.9990739354087918e-05,
      "loss": 1.0334,
      "step": 30
    },
    {
      "epoch": 0.06581740976645435,
      "grad_norm": 0.9094459507546309,
      "learning_rate": 2.9986665273697548e-05,
      "loss": 1.0191,
      "step": 31
    },
    {
      "epoch": 0.06794055201698514,
      "grad_norm": 0.8328888734598474,
      "learning_rate": 2.9981850927109472e-05,
      "loss": 1.0266,
      "step": 32
    },
    {
      "epoch": 0.07006369426751592,
      "grad_norm": 0.9265287035853829,
      "learning_rate": 2.9976296552128305e-05,
      "loss": 0.9839,
      "step": 33
    },
    {
      "epoch": 0.07218683651804671,
      "grad_norm": 0.8386673953378151,
      "learning_rate": 2.9970002423112342e-05,
      "loss": 0.9856,
      "step": 34
    },
    {
      "epoch": 0.07430997876857749,
      "grad_norm": 0.8773223216488372,
      "learning_rate": 2.996296885096003e-05,
      "loss": 0.9913,
      "step": 35
    },
    {
      "epoch": 0.07643312101910828,
      "grad_norm": 1.9920269675167115,
      "learning_rate": 2.9955196183094604e-05,
      "loss": 1.0276,
      "step": 36
    },
    {
      "epoch": 0.07855626326963906,
      "grad_norm": 0.8541287848004893,
      "learning_rate": 2.9946684803446928e-05,
      "loss": 0.9692,
      "step": 37
    },
    {
      "epoch": 0.08067940552016985,
      "grad_norm": 0.8475286973659103,
      "learning_rate": 2.9937435132436517e-05,
      "loss": 0.9648,
      "step": 38
    },
    {
      "epoch": 0.08280254777070063,
      "grad_norm": 0.891470065700579,
      "learning_rate": 2.9927447626950795e-05,
      "loss": 0.9469,
      "step": 39
    },
    {
      "epoch": 0.08492569002123142,
      "grad_norm": 0.918643932520895,
      "learning_rate": 2.9916722780322504e-05,
      "loss": 1.0,
      "step": 40
    },
    {
      "epoch": 0.0870488322717622,
      "grad_norm": 0.89952358144301,
      "learning_rate": 2.9905261122305344e-05,
      "loss": 0.9591,
      "step": 41
    },
    {
      "epoch": 0.08917197452229299,
      "grad_norm": 0.9534501125480317,
      "learning_rate": 2.9893063219047815e-05,
      "loss": 0.9638,
      "step": 42
    },
    {
      "epoch": 0.09129511677282377,
      "grad_norm": 0.9410259599712866,
      "learning_rate": 2.988012967306524e-05,
      "loss": 0.9631,
      "step": 43
    },
    {
      "epoch": 0.09341825902335456,
      "grad_norm": 0.7903964300371441,
      "learning_rate": 2.9866461123210005e-05,
      "loss": 1.0352,
      "step": 44
    },
    {
      "epoch": 0.09554140127388536,
      "grad_norm": 0.8186703306518206,
      "learning_rate": 2.985205824464001e-05,
      "loss": 1.035,
      "step": 45
    },
    {
      "epoch": 0.09766454352441614,
      "grad_norm": 1.1828963478090526,
      "learning_rate": 2.983692174878531e-05,
      "loss": 1.0224,
      "step": 46
    },
    {
      "epoch": 0.09978768577494693,
      "grad_norm": 1.0653437359039768,
      "learning_rate": 2.9821052383312987e-05,
      "loss": 0.9845,
      "step": 47
    },
    {
      "epoch": 0.10191082802547771,
      "grad_norm": 0.9139888799070773,
      "learning_rate": 2.980445093209021e-05,
      "loss": 0.9882,
      "step": 48
    },
    {
      "epoch": 0.1040339702760085,
      "grad_norm": 1.0047461469584085,
      "learning_rate": 2.9787118215145502e-05,
      "loss": 0.9707,
      "step": 49
    },
    {
      "epoch": 0.10615711252653928,
      "grad_norm": 0.9419818838483188,
      "learning_rate": 2.976905508862828e-05,
      "loss": 1.0093,
      "step": 50
    },
    {
      "epoch": 0.10828025477707007,
      "grad_norm": 0.8165141706193597,
      "learning_rate": 2.9750262444766502e-05,
      "loss": 0.9982,
      "step": 51
    },
    {
      "epoch": 0.11040339702760085,
      "grad_norm": 0.9276179840515267,
      "learning_rate": 2.9730741211822654e-05,
      "loss": 1.0062,
      "step": 52
    },
    {
      "epoch": 0.11252653927813164,
      "grad_norm": 0.7769869987656481,
      "learning_rate": 2.9710492354047857e-05,
      "loss": 0.9344,
      "step": 53
    },
    {
      "epoch": 0.11464968152866242,
      "grad_norm": 0.8691988768513328,
      "learning_rate": 2.968951687163426e-05,
      "loss": 0.9953,
      "step": 54
    },
    {
      "epoch": 0.11677282377919321,
      "grad_norm": 0.8209469205962703,
      "learning_rate": 2.9667815800665637e-05,
      "loss": 1.0306,
      "step": 55
    },
    {
      "epoch": 0.11889596602972399,
      "grad_norm": 0.7477038589692927,
      "learning_rate": 2.9645390213066193e-05,
      "loss": 1.0604,
      "step": 56
    },
    {
      "epoch": 0.12101910828025478,
      "grad_norm": 0.8205605966500025,
      "learning_rate": 2.9622241216547623e-05,
      "loss": 0.9994,
      "step": 57
    },
    {
      "epoch": 0.12314225053078556,
      "grad_norm": 0.8004707255796194,
      "learning_rate": 2.9598369954554405e-05,
      "loss": 0.9995,
      "step": 58
    },
    {
      "epoch": 0.12526539278131635,
      "grad_norm": 0.7081299100114782,
      "learning_rate": 2.957377760620732e-05,
      "loss": 0.9943,
      "step": 59
    },
    {
      "epoch": 0.12738853503184713,
      "grad_norm": 0.7217649291059394,
      "learning_rate": 2.9548465386245185e-05,
      "loss": 0.987,
      "step": 60
    },
    {
      "epoch": 0.12951167728237792,
      "grad_norm": 0.8205996950485613,
      "learning_rate": 2.952243454496488e-05,
      "loss": 0.9136,
      "step": 61
    },
    {
      "epoch": 0.1316348195329087,
      "grad_norm": 0.7201961172816332,
      "learning_rate": 2.9495686368159592e-05,
      "loss": 0.976,
      "step": 62
    },
    {
      "epoch": 0.1337579617834395,
      "grad_norm": 0.7696946636431424,
      "learning_rate": 2.946822217705526e-05,
      "loss": 0.9993,
      "step": 63
    },
    {
      "epoch": 0.13588110403397027,
      "grad_norm": 0.7848756346253257,
      "learning_rate": 2.9440043328245366e-05,
      "loss": 1.0206,
      "step": 64
    },
    {
      "epoch": 0.13800424628450106,
      "grad_norm": 0.740850768759331,
      "learning_rate": 2.9411151213623894e-05,
      "loss": 1.0311,
      "step": 65
    },
    {
      "epoch": 0.14012738853503184,
      "grad_norm": 0.7028782098692511,
      "learning_rate": 2.938154726031659e-05,
      "loss": 0.9681,
      "step": 66
    },
    {
      "epoch": 0.14225053078556263,
      "grad_norm": 0.7087905746550794,
      "learning_rate": 2.9351232930610473e-05,
      "loss": 0.975,
      "step": 67
    },
    {
      "epoch": 0.14437367303609341,
      "grad_norm": 0.7512258835012833,
      "learning_rate": 2.932020972188157e-05,
      "loss": 0.9703,
      "step": 68
    },
    {
      "epoch": 0.1464968152866242,
      "grad_norm": 0.6741016163735575,
      "learning_rate": 2.9288479166521014e-05,
      "loss": 0.9495,
      "step": 69
    },
    {
      "epoch": 0.14861995753715498,
      "grad_norm": 0.740748414113653,
      "learning_rate": 2.9256042831859295e-05,
      "loss": 1.0603,
      "step": 70
    },
    {
      "epoch": 0.15074309978768577,
      "grad_norm": 0.7918692932011524,
      "learning_rate": 2.9222902320088882e-05,
      "loss": 1.0028,
      "step": 71
    },
    {
      "epoch": 0.15286624203821655,
      "grad_norm": 0.759432829778867,
      "learning_rate": 2.9189059268185057e-05,
      "loss": 1.0205,
      "step": 72
    },
    {
      "epoch": 0.15498938428874734,
      "grad_norm": 0.7025218211620403,
      "learning_rate": 2.9154515347825065e-05,
      "loss": 0.9696,
      "step": 73
    },
    {
      "epoch": 0.15711252653927812,
      "grad_norm": 0.8396244762201516,
      "learning_rate": 2.9119272265305546e-05,
      "loss": 0.9472,
      "step": 74
    },
    {
      "epoch": 0.1592356687898089,
      "grad_norm": 0.7538155851518231,
      "learning_rate": 2.9083331761458247e-05,
      "loss": 0.992,
      "step": 75
    },
    {
      "epoch": 0.1613588110403397,
      "grad_norm": 0.8141973691781073,
      "learning_rate": 2.904669561156404e-05,
      "loss": 0.9097,
      "step": 76
    },
    {
      "epoch": 0.16348195329087048,
      "grad_norm": 0.6587425676342493,
      "learning_rate": 2.9009365625265215e-05,
      "loss": 0.9604,
      "step": 77
    },
    {
      "epoch": 0.16560509554140126,
      "grad_norm": 0.7407423574470945,
      "learning_rate": 2.8971343646476114e-05,
      "loss": 0.9779,
      "step": 78
    },
    {
      "epoch": 0.16772823779193205,
      "grad_norm": 0.8004547291076868,
      "learning_rate": 2.893263155329204e-05,
      "loss": 0.9766,
      "step": 79
    },
    {
      "epoch": 0.16985138004246284,
      "grad_norm": 0.7457062213618112,
      "learning_rate": 2.8893231257896502e-05,
      "loss": 1.0744,
      "step": 80
    },
    {
      "epoch": 0.17197452229299362,
      "grad_norm": 0.7225596471074269,
      "learning_rate": 2.8853144706466725e-05,
      "loss": 1.0006,
      "step": 81
    },
    {
      "epoch": 0.1740976645435244,
      "grad_norm": 0.9234525636361333,
      "learning_rate": 2.881237387907757e-05,
      "loss": 1.0595,
      "step": 82
    },
    {
      "epoch": 0.1762208067940552,
      "grad_norm": 0.7277875089674879,
      "learning_rate": 2.8770920789603687e-05,
      "loss": 0.9764,
      "step": 83
    },
    {
      "epoch": 0.17834394904458598,
      "grad_norm": 0.7251182820248202,
      "learning_rate": 2.8728787485620068e-05,
      "loss": 1.0087,
      "step": 84
    },
    {
      "epoch": 0.18046709129511676,
      "grad_norm": 0.7259248063014598,
      "learning_rate": 2.868597604830088e-05,
      "loss": 0.9895,
      "step": 85
    },
    {
      "epoch": 0.18259023354564755,
      "grad_norm": 0.7236366037154788,
      "learning_rate": 2.864248859231669e-05,
      "loss": 1.0186,
      "step": 86
    },
    {
      "epoch": 0.18471337579617833,
      "grad_norm": 0.7104922685407246,
      "learning_rate": 2.859832726573e-05,
      "loss": 1.0179,
      "step": 87
    },
    {
      "epoch": 0.18683651804670912,
      "grad_norm": 0.6494155954698466,
      "learning_rate": 2.855349424988915e-05,
      "loss": 0.9532,
      "step": 88
    },
    {
      "epoch": 0.18895966029723993,
      "grad_norm": 0.7642728344785427,
      "learning_rate": 2.8507991759320545e-05,
      "loss": 0.931,
      "step": 89
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 0.7579534251967283,
      "learning_rate": 2.8461822041619312e-05,
      "loss": 1.0023,
      "step": 90
    },
    {
      "epoch": 0.1932059447983015,
      "grad_norm": 0.700517970884762,
      "learning_rate": 2.841498737733824e-05,
      "loss": 0.9251,
      "step": 91
    },
    {
      "epoch": 0.19532908704883228,
      "grad_norm": 0.7550458532390354,
      "learning_rate": 2.8367490079875154e-05,
      "loss": 0.9632,
      "step": 92
    },
    {
      "epoch": 0.19745222929936307,
      "grad_norm": 0.7561266680050324,
      "learning_rate": 2.831933249535865e-05,
      "loss": 0.975,
      "step": 93
    },
    {
      "epoch": 0.19957537154989385,
      "grad_norm": 0.7554339394976815,
      "learning_rate": 2.827051700253217e-05,
      "loss": 0.9574,
      "step": 94
    },
    {
      "epoch": 0.20169851380042464,
      "grad_norm": 0.7519417055562898,
      "learning_rate": 2.8221046012636558e-05,
      "loss": 1.0089,
      "step": 95
    },
    {
      "epoch": 0.20382165605095542,
      "grad_norm": 0.695300221944843,
      "learning_rate": 2.817092196929091e-05,
      "loss": 0.9586,
      "step": 96
    },
    {
      "epoch": 0.2059447983014862,
      "grad_norm": 0.7004398610006882,
      "learning_rate": 2.8120147348371912e-05,
      "loss": 0.9575,
      "step": 97
    },
    {
      "epoch": 0.208067940552017,
      "grad_norm": 0.7161258126899045,
      "learning_rate": 2.8068724657891507e-05,
      "loss": 0.9429,
      "step": 98
    },
    {
      "epoch": 0.21019108280254778,
      "grad_norm": 0.7498697224476674,
      "learning_rate": 2.801665643787303e-05,
      "loss": 1.0132,
      "step": 99
    },
    {
      "epoch": 0.21231422505307856,
      "grad_norm": 0.6820422896195972,
      "learning_rate": 2.7963945260225748e-05,
      "loss": 1.0142,
      "step": 100
    },
    {
      "epoch": 0.21443736730360935,
      "grad_norm": 0.6998151084514577,
      "learning_rate": 2.7910593728617813e-05,
      "loss": 1.0474,
      "step": 101
    },
    {
      "epoch": 0.21656050955414013,
      "grad_norm": 0.6659748871284866,
      "learning_rate": 2.7856604478347655e-05,
      "loss": 0.964,
      "step": 102
    },
    {
      "epoch": 0.21868365180467092,
      "grad_norm": 0.6639674612741819,
      "learning_rate": 2.7801980176213798e-05,
      "loss": 0.9141,
      "step": 103
    },
    {
      "epoch": 0.2208067940552017,
      "grad_norm": 0.7962940595848361,
      "learning_rate": 2.7746723520383174e-05,
      "loss": 0.98,
      "step": 104
    },
    {
      "epoch": 0.2229299363057325,
      "grad_norm": 0.6748458306743221,
      "learning_rate": 2.76908372402578e-05,
      "loss": 0.9387,
      "step": 105
    },
    {
      "epoch": 0.22505307855626328,
      "grad_norm": 0.6591397874457393,
      "learning_rate": 2.763432409633998e-05,
      "loss": 0.9401,
      "step": 106
    },
    {
      "epoch": 0.22717622080679406,
      "grad_norm": 0.6815163810802223,
      "learning_rate": 2.7577186880095966e-05,
      "loss": 0.9852,
      "step": 107
    },
    {
      "epoch": 0.22929936305732485,
      "grad_norm": 0.6786470466077741,
      "learning_rate": 2.7519428413818034e-05,
      "loss": 0.9549,
      "step": 108
    },
    {
      "epoch": 0.23142250530785563,
      "grad_norm": 0.722955020585509,
      "learning_rate": 2.7461051550485116e-05,
      "loss": 1.0021,
      "step": 109
    },
    {
      "epoch": 0.23354564755838642,
      "grad_norm": 0.7518724654809783,
      "learning_rate": 2.740205917362186e-05,
      "loss": 1.0168,
      "step": 110
    },
    {
      "epoch": 0.2356687898089172,
      "grad_norm": 0.5989515308193117,
      "learning_rate": 2.7342454197156194e-05,
      "loss": 0.9608,
      "step": 111
    },
    {
      "epoch": 0.23779193205944799,
      "grad_norm": 0.63739062901645,
      "learning_rate": 2.728223956527539e-05,
      "loss": 0.9779,
      "step": 112
    },
    {
      "epoch": 0.23991507430997877,
      "grad_norm": 0.7107030178119403,
      "learning_rate": 2.722141825228066e-05,
      "loss": 0.9507,
      "step": 113
    },
    {
      "epoch": 0.24203821656050956,
      "grad_norm": 0.6662094575605222,
      "learning_rate": 2.7159993262440228e-05,
      "loss": 0.9566,
      "step": 114
    },
    {
      "epoch": 0.24416135881104034,
      "grad_norm": 0.684949594473451,
      "learning_rate": 2.7097967629840906e-05,
      "loss": 1.0072,
      "step": 115
    },
    {
      "epoch": 0.24628450106157113,
      "grad_norm": 0.717234854863913,
      "learning_rate": 2.703534441823827e-05,
      "loss": 0.9774,
      "step": 116
    },
    {
      "epoch": 0.2484076433121019,
      "grad_norm": 0.6600997556047009,
      "learning_rate": 2.6972126720905293e-05,
      "loss": 0.9804,
      "step": 117
    },
    {
      "epoch": 0.2505307855626327,
      "grad_norm": 0.6717502933643555,
      "learning_rate": 2.6908317660479583e-05,
      "loss": 0.9805,
      "step": 118
    },
    {
      "epoch": 0.2526539278131635,
      "grad_norm": 0.6668903744774365,
      "learning_rate": 2.68439203888091e-05,
      "loss": 1.012,
      "step": 119
    },
    {
      "epoch": 0.25477707006369427,
      "grad_norm": 0.7423058243771732,
      "learning_rate": 2.6778938086796512e-05,
      "loss": 1.0772,
      "step": 120
    },
    {
      "epoch": 0.25690021231422505,
      "grad_norm": 0.7186526888110079,
      "learning_rate": 2.6713373964242043e-05,
      "loss": 0.9614,
      "step": 121
    },
    {
      "epoch": 0.25902335456475584,
      "grad_norm": 0.7020168430150423,
      "learning_rate": 2.6647231259684946e-05,
      "loss": 0.9639,
      "step": 122
    },
    {
      "epoch": 0.2611464968152866,
      "grad_norm": 0.6506699216795314,
      "learning_rate": 2.6580513240243524e-05,
      "loss": 1.0157,
      "step": 123
    },
    {
      "epoch": 0.2632696390658174,
      "grad_norm": 0.6979496800781052,
      "learning_rate": 2.651322320145375e-05,
      "loss": 0.9827,
      "step": 124
    },
    {
      "epoch": 0.2653927813163482,
      "grad_norm": 0.7041593201924282,
      "learning_rate": 2.6445364467106494e-05,
      "loss": 0.9416,
      "step": 125
    },
    {
      "epoch": 0.267515923566879,
      "grad_norm": 0.7474469108757689,
      "learning_rate": 2.637694038908333e-05,
      "loss": 1.0345,
      "step": 126
    },
    {
      "epoch": 0.26963906581740976,
      "grad_norm": 0.6614249491962885,
      "learning_rate": 2.630795434719099e-05,
      "loss": 0.9453,
      "step": 127
    },
    {
      "epoch": 0.27176220806794055,
      "grad_norm": 0.6657326589669448,
      "learning_rate": 2.623840974899439e-05,
      "loss": 0.889,
      "step": 128
    },
    {
      "epoch": 0.27388535031847133,
      "grad_norm": 0.8022831021638541,
      "learning_rate": 2.616831002964834e-05,
      "loss": 0.9739,
      "step": 129
    },
    {
      "epoch": 0.2760084925690021,
      "grad_norm": 0.7179257591811719,
      "learning_rate": 2.609765865172786e-05,
      "loss": 0.9843,
      "step": 130
    },
    {
      "epoch": 0.2781316348195329,
      "grad_norm": 0.605816781626969,
      "learning_rate": 2.6026459105057127e-05,
      "loss": 0.96,
      "step": 131
    },
    {
      "epoch": 0.2802547770700637,
      "grad_norm": 0.6689266661628013,
      "learning_rate": 2.5954714906537116e-05,
      "loss": 0.9829,
      "step": 132
    },
    {
      "epoch": 0.2823779193205945,
      "grad_norm": 0.6414905131754337,
      "learning_rate": 2.5882429599971872e-05,
      "loss": 0.9315,
      "step": 133
    },
    {
      "epoch": 0.28450106157112526,
      "grad_norm": 0.6383947258042313,
      "learning_rate": 2.580960675589347e-05,
      "loss": 1.0047,
      "step": 134
    },
    {
      "epoch": 0.28662420382165604,
      "grad_norm": 0.6843220347965844,
      "learning_rate": 2.573624997138565e-05,
      "loss": 1.0134,
      "step": 135
    },
    {
      "epoch": 0.28874734607218683,
      "grad_norm": 0.6783409077200856,
      "learning_rate": 2.5662362869906123e-05,
      "loss": 0.9991,
      "step": 136
    },
    {
      "epoch": 0.2908704883227176,
      "grad_norm": 0.6082410802648054,
      "learning_rate": 2.558794910110761e-05,
      "loss": 0.9365,
      "step": 137
    },
    {
      "epoch": 0.2929936305732484,
      "grad_norm": 0.6110004204749758,
      "learning_rate": 2.5513012340657557e-05,
      "loss": 0.9458,
      "step": 138
    },
    {
      "epoch": 0.2951167728237792,
      "grad_norm": 0.6537047244906001,
      "learning_rate": 2.5437556290056575e-05,
      "loss": 0.9348,
      "step": 139
    },
    {
      "epoch": 0.29723991507430997,
      "grad_norm": 0.6237035022111638,
      "learning_rate": 2.5361584676455627e-05,
      "loss": 0.9963,
      "step": 140
    },
    {
      "epoch": 0.29936305732484075,
      "grad_norm": 0.658465414967936,
      "learning_rate": 2.5285101252471874e-05,
      "loss": 0.9365,
      "step": 141
    },
    {
      "epoch": 0.30148619957537154,
      "grad_norm": 0.6275241420274199,
      "learning_rate": 2.5208109796003364e-05,
      "loss": 1.0091,
      "step": 142
    },
    {
      "epoch": 0.3036093418259023,
      "grad_norm": 0.6338044548670078,
      "learning_rate": 2.5130614110042415e-05,
      "loss": 1.0313,
      "step": 143
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 0.6708789723264734,
      "learning_rate": 2.5052618022487733e-05,
      "loss": 0.9077,
      "step": 144
    },
    {
      "epoch": 0.3078556263269639,
      "grad_norm": 0.6673243086867044,
      "learning_rate": 2.4974125385955374e-05,
      "loss": 0.9733,
      "step": 145
    },
    {
      "epoch": 0.3099787685774947,
      "grad_norm": 0.6708310719412189,
      "learning_rate": 2.4895140077588412e-05,
      "loss": 0.9224,
      "step": 146
    },
    {
      "epoch": 0.31210191082802546,
      "grad_norm": 0.744990991437575,
      "learning_rate": 2.481566599886546e-05,
      "loss": 0.9491,
      "step": 147
    },
    {
      "epoch": 0.31422505307855625,
      "grad_norm": 0.6620719317323561,
      "learning_rate": 2.473570707540793e-05,
      "loss": 1.0361,
      "step": 148
    },
    {
      "epoch": 0.31634819532908703,
      "grad_norm": 0.6265774077983216,
      "learning_rate": 2.4655267256786126e-05,
      "loss": 1.0269,
      "step": 149
    },
    {
      "epoch": 0.3184713375796178,
      "grad_norm": 0.6861510574862938,
      "learning_rate": 2.4574350516324176e-05,
      "loss": 0.9546,
      "step": 150
    },
    {
      "epoch": 0.3205944798301486,
      "grad_norm": 0.6074369745423525,
      "learning_rate": 2.4492960850903757e-05,
      "loss": 0.9913,
      "step": 151
    },
    {
      "epoch": 0.3227176220806794,
      "grad_norm": 0.6170833154929086,
      "learning_rate": 2.4411102280766658e-05,
      "loss": 0.9914,
      "step": 152
    },
    {
      "epoch": 0.3248407643312102,
      "grad_norm": 0.6453490886193386,
      "learning_rate": 2.432877884931623e-05,
      "loss": 1.0185,
      "step": 153
    },
    {
      "epoch": 0.32696390658174096,
      "grad_norm": 0.6376312745074503,
      "learning_rate": 2.4245994622917636e-05,
      "loss": 1.014,
      "step": 154
    },
    {
      "epoch": 0.32908704883227174,
      "grad_norm": 0.5742876859098425,
      "learning_rate": 2.4162753690696998e-05,
      "loss": 1.0054,
      "step": 155
    },
    {
      "epoch": 0.33121019108280253,
      "grad_norm": 0.6636167519331397,
      "learning_rate": 2.4079060164339427e-05,
      "loss": 0.988,
      "step": 156
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.6393378453153665,
      "learning_rate": 2.3994918177885906e-05,
      "loss": 1.0362,
      "step": 157
    }
  ],
  "logging_steps": 1,
  "max_steps": 471,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 157,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 23254277554176.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}