{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.1150592216582065,
  "eval_steps": 500,
  "global_step": 2500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008460236886632826,
      "grad_norm": 2.4233109951019287,
      "learning_rate": 1.9949238578680207e-05,
      "loss": 1.3216,
      "step": 10
    },
    {
      "epoch": 0.01692047377326565,
      "grad_norm": 1.1721662282943726,
      "learning_rate": 1.9892836999435986e-05,
      "loss": 0.5312,
      "step": 20
    },
    {
      "epoch": 0.025380710659898477,
      "grad_norm": 0.899889349937439,
      "learning_rate": 1.9836435420191765e-05,
      "loss": 0.2969,
      "step": 30
    },
    {
      "epoch": 0.0338409475465313,
      "grad_norm": 0.4674186408519745,
      "learning_rate": 1.9780033840947548e-05,
      "loss": 0.1564,
      "step": 40
    },
    {
      "epoch": 0.04230118443316413,
      "grad_norm": 0.9013752937316895,
      "learning_rate": 1.972363226170333e-05,
      "loss": 0.0966,
      "step": 50
    },
    {
      "epoch": 0.050761421319796954,
      "grad_norm": 0.364164799451828,
      "learning_rate": 1.966723068245911e-05,
      "loss": 0.0516,
      "step": 60
    },
    {
      "epoch": 0.05922165820642978,
      "grad_norm": 0.22717136144638062,
      "learning_rate": 1.9610829103214893e-05,
      "loss": 0.0307,
      "step": 70
    },
    {
      "epoch": 0.0676818950930626,
      "grad_norm": 0.09149003773927689,
      "learning_rate": 1.9554427523970672e-05,
      "loss": 0.0182,
      "step": 80
    },
    {
      "epoch": 0.07614213197969544,
      "grad_norm": 0.12942063808441162,
      "learning_rate": 1.949802594472645e-05,
      "loss": 0.017,
      "step": 90
    },
    {
      "epoch": 0.08460236886632826,
      "grad_norm": 0.27440059185028076,
      "learning_rate": 1.9441624365482234e-05,
      "loss": 0.0113,
      "step": 100
    },
    {
      "epoch": 0.09306260575296109,
      "grad_norm": 0.5005059242248535,
      "learning_rate": 1.9385222786238017e-05,
      "loss": 0.0082,
      "step": 110
    },
    {
      "epoch": 0.10152284263959391,
      "grad_norm": 0.19820384681224823,
      "learning_rate": 1.9328821206993796e-05,
      "loss": 0.0057,
      "step": 120
    },
    {
      "epoch": 0.10998307952622674,
      "grad_norm": 0.03431914001703262,
      "learning_rate": 1.927241962774958e-05,
      "loss": 0.0055,
      "step": 130
    },
    {
      "epoch": 0.11844331641285956,
      "grad_norm": 0.4414117634296417,
      "learning_rate": 1.921601804850536e-05,
      "loss": 0.0063,
      "step": 140
    },
    {
      "epoch": 0.12690355329949238,
      "grad_norm": 0.9055352807044983,
      "learning_rate": 1.915961646926114e-05,
      "loss": 0.0048,
      "step": 150
    },
    {
      "epoch": 0.1353637901861252,
      "grad_norm": 0.036724768579006195,
      "learning_rate": 1.910321489001692e-05,
      "loss": 0.0035,
      "step": 160
    },
    {
      "epoch": 0.14382402707275804,
      "grad_norm": 0.0888664722442627,
      "learning_rate": 1.9046813310772703e-05,
      "loss": 0.0033,
      "step": 170
    },
    {
      "epoch": 0.15228426395939088,
      "grad_norm": 0.019098607823252678,
      "learning_rate": 1.8990411731528486e-05,
      "loss": 0.0029,
      "step": 180
    },
    {
      "epoch": 0.16074450084602368,
      "grad_norm": 0.0171552412211895,
      "learning_rate": 1.8934010152284265e-05,
      "loss": 0.0029,
      "step": 190
    },
    {
      "epoch": 0.1692047377326565,
      "grad_norm": 0.01885647512972355,
      "learning_rate": 1.8877608573040048e-05,
      "loss": 0.0028,
      "step": 200
    },
    {
      "epoch": 0.17766497461928935,
      "grad_norm": 0.01599389687180519,
      "learning_rate": 1.8821206993795827e-05,
      "loss": 0.0022,
      "step": 210
    },
    {
      "epoch": 0.18612521150592218,
      "grad_norm": 0.017242038622498512,
      "learning_rate": 1.876480541455161e-05,
      "loss": 0.0023,
      "step": 220
    },
    {
      "epoch": 0.19458544839255498,
      "grad_norm": 0.5941590666770935,
      "learning_rate": 1.870840383530739e-05,
      "loss": 0.0031,
      "step": 230
    },
    {
      "epoch": 0.20304568527918782,
      "grad_norm": 0.4872148931026459,
      "learning_rate": 1.8652002256063172e-05,
      "loss": 0.0027,
      "step": 240
    },
    {
      "epoch": 0.21150592216582065,
      "grad_norm": 0.014173777773976326,
      "learning_rate": 1.859560067681895e-05,
      "loss": 0.0021,
      "step": 250
    },
    {
      "epoch": 0.21996615905245348,
      "grad_norm": 0.024433018639683723,
      "learning_rate": 1.8539199097574734e-05,
      "loss": 0.0017,
      "step": 260
    },
    {
      "epoch": 0.22842639593908629,
      "grad_norm": 0.017857005819678307,
      "learning_rate": 1.8482797518330516e-05,
      "loss": 0.0017,
      "step": 270
    },
    {
      "epoch": 0.23688663282571912,
      "grad_norm": 0.014704135246574879,
      "learning_rate": 1.8426395939086296e-05,
      "loss": 0.0021,
      "step": 280
    },
    {
      "epoch": 0.24534686971235195,
      "grad_norm": 0.022361995652318,
      "learning_rate": 1.8369994359842075e-05,
      "loss": 0.0016,
      "step": 290
    },
    {
      "epoch": 0.25380710659898476,
      "grad_norm": 0.01112055778503418,
      "learning_rate": 1.8313592780597858e-05,
      "loss": 0.0015,
      "step": 300
    },
    {
      "epoch": 0.2622673434856176,
      "grad_norm": 0.010048450902104378,
      "learning_rate": 1.825719120135364e-05,
      "loss": 0.0015,
      "step": 310
    },
    {
      "epoch": 0.2707275803722504,
      "grad_norm": 0.012935510836541653,
      "learning_rate": 1.820078962210942e-05,
      "loss": 0.0014,
      "step": 320
    },
    {
      "epoch": 0.27918781725888325,
      "grad_norm": 0.010541570372879505,
      "learning_rate": 1.8144388042865203e-05,
      "loss": 0.0013,
      "step": 330
    },
    {
      "epoch": 0.2876480541455161,
      "grad_norm": 0.009258048608899117,
      "learning_rate": 1.8087986463620982e-05,
      "loss": 0.0012,
      "step": 340
    },
    {
      "epoch": 0.2961082910321489,
      "grad_norm": 0.009454768151044846,
      "learning_rate": 1.8031584884376765e-05,
      "loss": 0.0015,
      "step": 350
    },
    {
      "epoch": 0.30456852791878175,
      "grad_norm": 0.018366724252700806,
      "learning_rate": 1.7975183305132544e-05,
      "loss": 0.0012,
      "step": 360
    },
    {
      "epoch": 0.3130287648054145,
      "grad_norm": 0.009252658113837242,
      "learning_rate": 1.7918781725888327e-05,
      "loss": 0.0011,
      "step": 370
    },
    {
      "epoch": 0.32148900169204736,
      "grad_norm": 0.1430797576904297,
      "learning_rate": 1.7862380146644106e-05,
      "loss": 0.0018,
      "step": 380
    },
    {
      "epoch": 0.3299492385786802,
      "grad_norm": 0.009182159788906574,
      "learning_rate": 1.780597856739989e-05,
      "loss": 0.0018,
      "step": 390
    },
    {
      "epoch": 0.338409475465313,
      "grad_norm": 0.00843009352684021,
      "learning_rate": 1.774957698815567e-05,
      "loss": 0.0017,
      "step": 400
    },
    {
      "epoch": 0.34686971235194586,
      "grad_norm": 0.008176930248737335,
      "learning_rate": 1.769317540891145e-05,
      "loss": 0.0013,
      "step": 410
    },
    {
      "epoch": 0.3553299492385787,
      "grad_norm": 0.008539380505681038,
      "learning_rate": 1.7636773829667233e-05,
      "loss": 0.001,
      "step": 420
    },
    {
      "epoch": 0.3637901861252115,
      "grad_norm": 0.008693977259099483,
      "learning_rate": 1.7580372250423013e-05,
      "loss": 0.0014,
      "step": 430
    },
    {
      "epoch": 0.37225042301184436,
      "grad_norm": 0.009141940623521805,
      "learning_rate": 1.7523970671178795e-05,
      "loss": 0.0012,
      "step": 440
    },
    {
      "epoch": 0.38071065989847713,
      "grad_norm": 0.009133282117545605,
      "learning_rate": 1.7467569091934575e-05,
      "loss": 0.0015,
      "step": 450
    },
    {
      "epoch": 0.38917089678510997,
      "grad_norm": 0.5799979567527771,
      "learning_rate": 1.7411167512690357e-05,
      "loss": 0.0013,
      "step": 460
    },
    {
      "epoch": 0.3976311336717428,
      "grad_norm": 0.04120843857526779,
      "learning_rate": 1.7354765933446137e-05,
      "loss": 0.0011,
      "step": 470
    },
    {
      "epoch": 0.40609137055837563,
      "grad_norm": 0.01276948768645525,
      "learning_rate": 1.729836435420192e-05,
      "loss": 0.0012,
      "step": 480
    },
    {
      "epoch": 0.41455160744500846,
      "grad_norm": 0.0076572224497795105,
      "learning_rate": 1.7241962774957702e-05,
      "loss": 0.0014,
      "step": 490
    },
    {
      "epoch": 0.4230118443316413,
      "grad_norm": 0.008293437771499157,
      "learning_rate": 1.718556119571348e-05,
      "loss": 0.0009,
      "step": 500
    },
    {
      "epoch": 0.43147208121827413,
      "grad_norm": 0.0068985214456915855,
      "learning_rate": 1.712915961646926e-05,
      "loss": 0.0009,
      "step": 510
    },
    {
      "epoch": 0.43993231810490696,
      "grad_norm": 0.029666345566511154,
      "learning_rate": 1.7072758037225043e-05,
      "loss": 0.0009,
      "step": 520
    },
    {
      "epoch": 0.44839255499153974,
      "grad_norm": 0.014065027236938477,
      "learning_rate": 1.7016356457980826e-05,
      "loss": 0.0008,
      "step": 530
    },
    {
      "epoch": 0.45685279187817257,
      "grad_norm": 0.00650499714538455,
      "learning_rate": 1.6959954878736606e-05,
      "loss": 0.0008,
      "step": 540
    },
    {
      "epoch": 0.4653130287648054,
      "grad_norm": 0.0065047661773860455,
      "learning_rate": 1.6903553299492388e-05,
      "loss": 0.0008,
      "step": 550
    },
    {
      "epoch": 0.47377326565143824,
      "grad_norm": 0.0071571446023881435,
      "learning_rate": 1.684715172024817e-05,
      "loss": 0.0008,
      "step": 560
    },
    {
      "epoch": 0.48223350253807107,
      "grad_norm": 0.005875805858522654,
      "learning_rate": 1.679075014100395e-05,
      "loss": 0.0008,
      "step": 570
    },
    {
      "epoch": 0.4906937394247039,
      "grad_norm": 0.005848041269928217,
      "learning_rate": 1.673434856175973e-05,
      "loss": 0.0007,
      "step": 580
    },
    {
      "epoch": 0.49915397631133673,
      "grad_norm": 0.006032396107912064,
      "learning_rate": 1.6677946982515512e-05,
      "loss": 0.0007,
      "step": 590
    },
    {
      "epoch": 0.5076142131979695,
      "grad_norm": 0.005858593620359898,
      "learning_rate": 1.662154540327129e-05,
      "loss": 0.0007,
      "step": 600
    },
    {
      "epoch": 0.5160744500846024,
      "grad_norm": 0.006055026315152645,
      "learning_rate": 1.6565143824027074e-05,
      "loss": 0.0007,
      "step": 610
    },
    {
      "epoch": 0.5245346869712352,
      "grad_norm": 0.0055991546250879765,
      "learning_rate": 1.6508742244782857e-05,
      "loss": 0.0039,
      "step": 620
    },
    {
      "epoch": 0.5329949238578681,
      "grad_norm": 0.006043154280632734,
      "learning_rate": 1.6452340665538636e-05,
      "loss": 0.0009,
      "step": 630
    },
    {
      "epoch": 0.5414551607445008,
      "grad_norm": 0.006406570319086313,
      "learning_rate": 1.6395939086294416e-05,
      "loss": 0.0008,
      "step": 640
    },
    {
      "epoch": 0.5499153976311336,
      "grad_norm": 0.006317532621324062,
      "learning_rate": 1.63395375070502e-05,
      "loss": 0.0007,
      "step": 650
    },
    {
      "epoch": 0.5583756345177665,
      "grad_norm": 0.0260737594217062,
      "learning_rate": 1.628313592780598e-05,
      "loss": 0.0007,
      "step": 660
    },
    {
      "epoch": 0.5668358714043993,
      "grad_norm": 0.005516720470041037,
      "learning_rate": 1.622673434856176e-05,
      "loss": 0.0007,
      "step": 670
    },
    {
      "epoch": 0.5752961082910322,
      "grad_norm": 0.005611425265669823,
      "learning_rate": 1.6170332769317543e-05,
      "loss": 0.0006,
      "step": 680
    },
    {
      "epoch": 0.583756345177665,
      "grad_norm": 0.005698953289538622,
      "learning_rate": 1.6113931190073326e-05,
      "loss": 0.0006,
      "step": 690
    },
    {
      "epoch": 0.5922165820642978,
      "grad_norm": 0.005045793950557709,
      "learning_rate": 1.6057529610829105e-05,
      "loss": 0.0006,
      "step": 700
    },
    {
      "epoch": 0.6006768189509306,
      "grad_norm": 0.005404220428317785,
      "learning_rate": 1.6001128031584884e-05,
      "loss": 0.0007,
      "step": 710
    },
    {
      "epoch": 0.6091370558375635,
      "grad_norm": 0.005528539884835482,
      "learning_rate": 1.5944726452340667e-05,
      "loss": 0.0007,
      "step": 720
    },
    {
      "epoch": 0.6175972927241963,
      "grad_norm": 0.0052613625302910805,
      "learning_rate": 1.5888324873096446e-05,
      "loss": 0.0006,
      "step": 730
    },
    {
      "epoch": 0.626057529610829,
      "grad_norm": 0.0050328257493674755,
      "learning_rate": 1.583192329385223e-05,
      "loss": 0.0006,
      "step": 740
    },
    {
      "epoch": 0.6345177664974619,
      "grad_norm": 0.005106599070131779,
      "learning_rate": 1.5775521714608012e-05,
      "loss": 0.0006,
      "step": 750
    },
    {
      "epoch": 0.6429780033840947,
      "grad_norm": 0.004836504813283682,
      "learning_rate": 1.571912013536379e-05,
      "loss": 0.0006,
      "step": 760
    },
    {
      "epoch": 0.6514382402707276,
      "grad_norm": 0.004986999090760946,
      "learning_rate": 1.566271855611957e-05,
      "loss": 0.0006,
      "step": 770
    },
    {
      "epoch": 0.6598984771573604,
      "grad_norm": 0.005235906690359116,
      "learning_rate": 1.5606316976875353e-05,
      "loss": 0.0005,
      "step": 780
    },
    {
      "epoch": 0.6683587140439933,
      "grad_norm": 0.4625732898712158,
      "learning_rate": 1.5549915397631136e-05,
      "loss": 0.0009,
      "step": 790
    },
    {
      "epoch": 0.676818950930626,
      "grad_norm": 0.004936546087265015,
      "learning_rate": 1.5493513818386915e-05,
      "loss": 0.0006,
      "step": 800
    },
    {
      "epoch": 0.6852791878172588,
      "grad_norm": 0.004956043791025877,
      "learning_rate": 1.5437112239142698e-05,
      "loss": 0.0005,
      "step": 810
    },
    {
      "epoch": 0.6937394247038917,
      "grad_norm": 0.005084918346256018,
      "learning_rate": 1.538071065989848e-05,
      "loss": 0.0005,
      "step": 820
    },
    {
      "epoch": 0.7021996615905245,
      "grad_norm": 0.006050592288374901,
      "learning_rate": 1.532430908065426e-05,
      "loss": 0.0005,
      "step": 830
    },
    {
      "epoch": 0.7106598984771574,
      "grad_norm": 0.0045923274010419846,
      "learning_rate": 1.526790750141004e-05,
      "loss": 0.0005,
      "step": 840
    },
    {
      "epoch": 0.7191201353637902,
      "grad_norm": 0.004406214691698551,
      "learning_rate": 1.5211505922165822e-05,
      "loss": 0.0005,
      "step": 850
    },
    {
      "epoch": 0.727580372250423,
      "grad_norm": 0.00449965288862586,
      "learning_rate": 1.5155104342921603e-05,
      "loss": 0.0005,
      "step": 860
    },
    {
      "epoch": 0.7360406091370558,
      "grad_norm": 0.00427517294883728,
      "learning_rate": 1.5098702763677384e-05,
      "loss": 0.0006,
      "step": 870
    },
    {
      "epoch": 0.7445008460236887,
      "grad_norm": 0.006013456266373396,
      "learning_rate": 1.5042301184433165e-05,
      "loss": 0.0005,
      "step": 880
    },
    {
      "epoch": 0.7529610829103215,
      "grad_norm": 0.004456107504665852,
      "learning_rate": 1.4985899605188948e-05,
      "loss": 0.0005,
      "step": 890
    },
    {
      "epoch": 0.7614213197969543,
      "grad_norm": 0.004223175812512636,
      "learning_rate": 1.4929498025944729e-05,
      "loss": 0.0005,
      "step": 900
    },
    {
      "epoch": 0.7698815566835872,
      "grad_norm": 0.004154487047344446,
      "learning_rate": 1.4873096446700508e-05,
      "loss": 0.0005,
      "step": 910
    },
    {
      "epoch": 0.7783417935702199,
      "grad_norm": 0.007055574096739292,
      "learning_rate": 1.4816694867456289e-05,
      "loss": 0.0005,
      "step": 920
    },
    {
      "epoch": 0.7868020304568528,
      "grad_norm": 0.005269153974950314,
      "learning_rate": 1.476029328821207e-05,
      "loss": 0.0005,
      "step": 930
    },
    {
      "epoch": 0.7952622673434856,
      "grad_norm": 0.004041856154799461,
      "learning_rate": 1.4703891708967853e-05,
      "loss": 0.0005,
      "step": 940
    },
    {
      "epoch": 0.8037225042301185,
      "grad_norm": 0.003965588286519051,
      "learning_rate": 1.4647490129723634e-05,
      "loss": 0.0005,
      "step": 950
    },
    {
      "epoch": 0.8121827411167513,
      "grad_norm": 0.005661304574459791,
      "learning_rate": 1.4591088550479415e-05,
      "loss": 0.0005,
      "step": 960
    },
    {
      "epoch": 0.8206429780033841,
      "grad_norm": 0.0039055896922945976,
      "learning_rate": 1.4534686971235198e-05,
      "loss": 0.0011,
      "step": 970
    },
    {
      "epoch": 0.8291032148900169,
      "grad_norm": 1.8310741186141968,
      "learning_rate": 1.4478285391990977e-05,
      "loss": 0.001,
      "step": 980
    },
    {
      "epoch": 0.8375634517766497,
      "grad_norm": 0.005107107572257519,
      "learning_rate": 1.4421883812746758e-05,
      "loss": 0.0008,
      "step": 990
    },
    {
      "epoch": 0.8460236886632826,
      "grad_norm": 0.005387498531490564,
      "learning_rate": 1.4365482233502539e-05,
      "loss": 0.0005,
      "step": 1000
    },
    {
      "epoch": 0.8544839255499154,
      "grad_norm": 0.0051966467872262,
      "learning_rate": 1.430908065425832e-05,
      "loss": 0.0005,
      "step": 1010
    },
    {
      "epoch": 0.8629441624365483,
      "grad_norm": 0.004603747744113207,
      "learning_rate": 1.4252679075014103e-05,
      "loss": 0.0005,
      "step": 1020
    },
    {
      "epoch": 0.871404399323181,
      "grad_norm": 0.0113060986623168,
      "learning_rate": 1.4196277495769884e-05,
      "loss": 0.0005,
      "step": 1030
    },
    {
      "epoch": 0.8798646362098139,
      "grad_norm": 0.004159980919212103,
      "learning_rate": 1.4139875916525665e-05,
      "loss": 0.0004,
      "step": 1040
    },
    {
      "epoch": 0.8883248730964467,
      "grad_norm": 0.004094019532203674,
      "learning_rate": 1.4083474337281444e-05,
      "loss": 0.0004,
      "step": 1050
    },
    {
      "epoch": 0.8967851099830795,
      "grad_norm": 0.00435257563367486,
      "learning_rate": 1.4027072758037225e-05,
      "loss": 0.0004,
      "step": 1060
    },
    {
      "epoch": 0.9052453468697124,
      "grad_norm": 0.003843541955575347,
      "learning_rate": 1.3970671178793008e-05,
      "loss": 0.0004,
      "step": 1070
    },
    {
      "epoch": 0.9137055837563451,
      "grad_norm": 0.16885782778263092,
      "learning_rate": 1.3914269599548789e-05,
      "loss": 0.0005,
      "step": 1080
    },
    {
      "epoch": 0.922165820642978,
      "grad_norm": 0.0037802334409207106,
      "learning_rate": 1.385786802030457e-05,
      "loss": 0.0004,
      "step": 1090
    },
    {
      "epoch": 0.9306260575296108,
      "grad_norm": 0.0037738329265266657,
      "learning_rate": 1.3801466441060352e-05,
      "loss": 0.0005,
      "step": 1100
    },
    {
      "epoch": 0.9390862944162437,
      "grad_norm": 0.012493060901761055,
      "learning_rate": 1.374506486181613e-05,
      "loss": 0.0004,
      "step": 1110
    },
    {
      "epoch": 0.9475465313028765,
      "grad_norm": 0.0037303464487195015,
      "learning_rate": 1.3688663282571913e-05,
      "loss": 0.0004,
      "step": 1120
    },
    {
      "epoch": 0.9560067681895094,
      "grad_norm": 0.0035704418551176786,
      "learning_rate": 1.3632261703327694e-05,
      "loss": 0.0007,
      "step": 1130
    },
    {
      "epoch": 0.9644670050761421,
      "grad_norm": 0.0038276014383882284,
      "learning_rate": 1.3575860124083475e-05,
      "loss": 0.0004,
      "step": 1140
    },
    {
      "epoch": 0.9729272419627749,
      "grad_norm": 0.004172508604824543,
      "learning_rate": 1.3519458544839257e-05,
      "loss": 0.0004,
      "step": 1150
    },
    {
      "epoch": 0.9813874788494078,
      "grad_norm": 0.20282192528247833,
      "learning_rate": 1.3463056965595038e-05,
      "loss": 0.0004,
      "step": 1160
    },
    {
      "epoch": 0.9898477157360406,
      "grad_norm": 0.0035703997127711773,
      "learning_rate": 1.340665538635082e-05,
      "loss": 0.0004,
      "step": 1170
    },
    {
      "epoch": 0.9983079526226735,
      "grad_norm": 0.003505186177790165,
      "learning_rate": 1.3350253807106599e-05,
      "loss": 0.0004,
      "step": 1180
    },
    {
      "epoch": 1.0067681895093064,
      "grad_norm": 0.003632462117820978,
      "learning_rate": 1.329385222786238e-05,
      "loss": 0.0004,
      "step": 1190
    },
    {
      "epoch": 1.015228426395939,
      "grad_norm": 0.0034904363565146923,
      "learning_rate": 1.3237450648618163e-05,
      "loss": 0.0004,
      "step": 1200
    },
    {
      "epoch": 1.023688663282572,
      "grad_norm": 0.003400213085114956,
      "learning_rate": 1.3181049069373944e-05,
      "loss": 0.0004,
      "step": 1210
    },
    {
      "epoch": 1.0321489001692048,
      "grad_norm": 0.02098667249083519,
      "learning_rate": 1.3124647490129725e-05,
      "loss": 0.0004,
      "step": 1220
    },
    {
      "epoch": 1.0406091370558375,
      "grad_norm": 0.0033113211393356323,
      "learning_rate": 1.3068245910885506e-05,
      "loss": 0.0004,
      "step": 1230
    },
    {
      "epoch": 1.0490693739424704,
      "grad_norm": 0.0075438846834003925,
      "learning_rate": 1.3011844331641288e-05,
      "loss": 0.001,
      "step": 1240
    },
    {
      "epoch": 1.0575296108291032,
      "grad_norm": 0.003400578862056136,
      "learning_rate": 1.2955442752397068e-05,
      "loss": 0.0006,
      "step": 1250
    },
    {
      "epoch": 1.0659898477157361,
      "grad_norm": 0.003571244655176997,
      "learning_rate": 1.2899041173152849e-05,
      "loss": 0.0004,
      "step": 1260
    },
    {
      "epoch": 1.0744500846023688,
      "grad_norm": 0.0032794324215501547,
      "learning_rate": 1.284263959390863e-05,
      "loss": 0.0004,
      "step": 1270
    },
    {
      "epoch": 1.0829103214890017,
      "grad_norm": 0.0032312178518623114,
      "learning_rate": 1.2786238014664412e-05,
      "loss": 0.0011,
      "step": 1280
    },
    {
      "epoch": 1.0913705583756346,
      "grad_norm": 0.003224333981052041,
      "learning_rate": 1.2729836435420193e-05,
      "loss": 0.0003,
      "step": 1290
    },
    {
      "epoch": 1.0998307952622675,
      "grad_norm": 0.0033167279325425625,
      "learning_rate": 1.2673434856175974e-05,
      "loss": 0.0004,
      "step": 1300
    },
    {
      "epoch": 1.1082910321489001,
      "grad_norm": 0.003647689940407872,
      "learning_rate": 1.2617033276931755e-05,
      "loss": 0.0009,
      "step": 1310
    },
    {
      "epoch": 1.116751269035533,
      "grad_norm": 0.005791259463876486,
      "learning_rate": 1.2560631697687535e-05,
      "loss": 0.0004,
      "step": 1320
    },
    {
      "epoch": 1.125211505922166,
      "grad_norm": 0.0036105727776885033,
      "learning_rate": 1.2504230118443317e-05,
      "loss": 0.0005,
      "step": 1330
    },
    {
      "epoch": 1.1336717428087986,
      "grad_norm": 0.0033262548968195915,
      "learning_rate": 1.2447828539199098e-05,
      "loss": 0.0003,
      "step": 1340
    },
    {
      "epoch": 1.1421319796954315,
      "grad_norm": 0.003255200106650591,
      "learning_rate": 1.239142695995488e-05,
      "loss": 0.0003,
      "step": 1350
    },
    {
      "epoch": 1.1505922165820643,
      "grad_norm": 0.0031492102425545454,
      "learning_rate": 1.233502538071066e-05,
      "loss": 0.0003,
      "step": 1360
    },
    {
      "epoch": 1.1590524534686972,
      "grad_norm": 0.0031071833800524473,
      "learning_rate": 1.2278623801466443e-05,
      "loss": 0.0006,
      "step": 1370
    },
    {
      "epoch": 1.16751269035533,
      "grad_norm": 0.003029242157936096,
      "learning_rate": 1.2222222222222224e-05,
      "loss": 0.0004,
      "step": 1380
    },
    {
      "epoch": 1.1759729272419628,
      "grad_norm": 0.003054770641028881,
      "learning_rate": 1.2165820642978003e-05,
      "loss": 0.0004,
      "step": 1390
    },
    {
      "epoch": 1.1844331641285957,
      "grad_norm": 0.002985539613291621,
      "learning_rate": 1.2109419063733784e-05,
      "loss": 0.0003,
      "step": 1400
    },
    {
      "epoch": 1.1928934010152283,
      "grad_norm": 0.0029861750081181526,
      "learning_rate": 1.2053017484489567e-05,
      "loss": 0.0003,
      "step": 1410
    },
    {
      "epoch": 1.2013536379018612,
      "grad_norm": 0.0031786304898560047,
      "learning_rate": 1.1996615905245348e-05,
      "loss": 0.0003,
      "step": 1420
    },
    {
      "epoch": 1.2098138747884941,
      "grad_norm": 0.0029660591389983892,
      "learning_rate": 1.194021432600113e-05,
      "loss": 0.0003,
      "step": 1430
    },
    {
      "epoch": 1.218274111675127,
      "grad_norm": 0.0029080845415592194,
      "learning_rate": 1.188381274675691e-05,
      "loss": 0.0003,
      "step": 1440
    },
    {
      "epoch": 1.2267343485617597,
      "grad_norm": 0.003032978158444166,
      "learning_rate": 1.1827411167512693e-05,
      "loss": 0.0003,
      "step": 1450
    },
    {
      "epoch": 1.2351945854483926,
      "grad_norm": 0.0028783201705664396,
      "learning_rate": 1.1771009588268472e-05,
      "loss": 0.0003,
      "step": 1460
    },
    {
      "epoch": 1.2436548223350254,
      "grad_norm": 0.002919497899711132,
      "learning_rate": 1.1714608009024253e-05,
      "loss": 0.0003,
      "step": 1470
    },
    {
      "epoch": 1.252115059221658,
      "grad_norm": 0.002829916076734662,
      "learning_rate": 1.1658206429780034e-05,
      "loss": 0.0003,
      "step": 1480
    },
    {
      "epoch": 1.260575296108291,
      "grad_norm": 0.0028766861651092768,
      "learning_rate": 1.1601804850535815e-05,
      "loss": 0.0003,
      "step": 1490
    },
    {
      "epoch": 1.2690355329949239,
      "grad_norm": 0.0027290356811136007,
      "learning_rate": 1.1545403271291598e-05,
      "loss": 0.0003,
      "step": 1500
    },
    {
      "epoch": 1.2774957698815568,
      "grad_norm": 0.0027461503632366657,
      "learning_rate": 1.1489001692047379e-05,
      "loss": 0.0003,
      "step": 1510
    },
    {
      "epoch": 1.2859560067681894,
      "grad_norm": 0.0027623216155916452,
      "learning_rate": 1.143260011280316e-05,
      "loss": 0.0003,
      "step": 1520
    },
    {
      "epoch": 1.2944162436548223,
      "grad_norm": 0.0028906099032610655,
      "learning_rate": 1.137619853355894e-05,
      "loss": 0.0003,
      "step": 1530
    },
    {
      "epoch": 1.3028764805414552,
      "grad_norm": 0.0027789094019681215,
      "learning_rate": 1.1319796954314722e-05,
      "loss": 0.0003,
      "step": 1540
    },
    {
      "epoch": 1.3113367174280879,
      "grad_norm": 0.00271011283621192,
      "learning_rate": 1.1263395375070503e-05,
      "loss": 0.0003,
      "step": 1550
    },
    {
      "epoch": 1.3197969543147208,
      "grad_norm": 0.002672335831448436,
      "learning_rate": 1.1206993795826284e-05,
      "loss": 0.0003,
      "step": 1560
    },
    {
      "epoch": 1.3282571912013537,
      "grad_norm": 0.002650258131325245,
      "learning_rate": 1.1150592216582065e-05,
      "loss": 0.0003,
      "step": 1570
    },
    {
      "epoch": 1.3367174280879865,
      "grad_norm": 0.002780508017167449,
      "learning_rate": 1.1094190637337848e-05,
      "loss": 0.0003,
      "step": 1580
    },
    {
      "epoch": 1.3451776649746192,
      "grad_norm": 0.0026137123350054026,
      "learning_rate": 1.1037789058093627e-05,
      "loss": 0.0003,
      "step": 1590
    },
    {
      "epoch": 1.353637901861252,
      "grad_norm": 0.0026269517838954926,
      "learning_rate": 1.0981387478849408e-05,
      "loss": 0.0003,
      "step": 1600
    },
    {
      "epoch": 1.362098138747885,
      "grad_norm": 0.0025848057121038437,
      "learning_rate": 1.092498589960519e-05,
      "loss": 0.0003,
      "step": 1610
    },
    {
      "epoch": 1.3705583756345177,
      "grad_norm": 0.0025470599066466093,
      "learning_rate": 1.086858432036097e-05,
      "loss": 0.0003,
      "step": 1620
    },
    {
      "epoch": 1.3790186125211505,
      "grad_norm": 0.0027439745608717203,
      "learning_rate": 1.0812182741116753e-05,
      "loss": 0.0003,
      "step": 1630
    },
    {
      "epoch": 1.3874788494077834,
      "grad_norm": 0.002537579508498311,
      "learning_rate": 1.0755781161872534e-05,
      "loss": 0.0003,
      "step": 1640
    },
    {
      "epoch": 1.3959390862944163,
      "grad_norm": 0.0025688863825052977,
      "learning_rate": 1.0699379582628315e-05,
      "loss": 0.0003,
      "step": 1650
    },
    {
      "epoch": 1.404399323181049,
      "grad_norm": 0.0026179850101470947,
      "learning_rate": 1.0642978003384094e-05,
      "loss": 0.0003,
      "step": 1660
    },
    {
      "epoch": 1.4128595600676819,
      "grad_norm": 0.002496235305443406,
      "learning_rate": 1.0586576424139875e-05,
      "loss": 0.0003,
      "step": 1670
    },
    {
      "epoch": 1.4213197969543148,
      "grad_norm": 0.0026708145160228014,
      "learning_rate": 1.0530174844895658e-05,
      "loss": 0.0003,
      "step": 1680
    },
    {
      "epoch": 1.4297800338409474,
      "grad_norm": 0.0025831579696387053,
      "learning_rate": 1.0473773265651439e-05,
      "loss": 0.0003,
      "step": 1690
    },
    {
      "epoch": 1.4382402707275803,
      "grad_norm": 0.0024914774112403393,
      "learning_rate": 1.041737168640722e-05,
      "loss": 0.0003,
      "step": 1700
    },
    {
      "epoch": 1.4467005076142132,
      "grad_norm": 0.002529192017391324,
      "learning_rate": 1.0360970107163003e-05,
      "loss": 0.0003,
      "step": 1710
    },
    {
      "epoch": 1.455160744500846,
      "grad_norm": 0.0024393699131906033,
      "learning_rate": 1.0304568527918784e-05,
      "loss": 0.0011,
      "step": 1720
    },
    {
      "epoch": 1.463620981387479,
      "grad_norm": 0.0043233660981059074,
      "learning_rate": 1.0248166948674563e-05,
      "loss": 0.0004,
      "step": 1730
    },
    {
      "epoch": 1.4720812182741116,
      "grad_norm": 0.002547438722103834,
      "learning_rate": 1.0191765369430344e-05,
      "loss": 0.0005,
      "step": 1740
    },
    {
      "epoch": 1.4805414551607445,
      "grad_norm": 0.0025178396608680487,
      "learning_rate": 1.0135363790186125e-05,
      "loss": 0.0003,
      "step": 1750
    },
    {
      "epoch": 1.4890016920473772,
      "grad_norm": 0.003602534532546997,
      "learning_rate": 1.0078962210941908e-05,
      "loss": 0.0003,
      "step": 1760
    },
    {
      "epoch": 1.49746192893401,
      "grad_norm": 0.002990358741953969,
      "learning_rate": 1.0022560631697689e-05,
      "loss": 0.0003,
      "step": 1770
    },
    {
      "epoch": 1.505922165820643,
      "grad_norm": 0.0024538026191294193,
      "learning_rate": 9.96615905245347e-06,
      "loss": 0.0003,
      "step": 1780
    },
    {
      "epoch": 1.5143824027072759,
      "grad_norm": 0.0024382509291172028,
      "learning_rate": 9.90975747320925e-06,
      "loss": 0.0003,
      "step": 1790
    },
    {
      "epoch": 1.5228426395939088,
      "grad_norm": 0.0026386724784970284,
      "learning_rate": 9.853355893965032e-06,
      "loss": 0.0003,
      "step": 1800
    },
    {
      "epoch": 1.5313028764805414,
      "grad_norm": 0.0023787450045347214,
      "learning_rate": 9.796954314720813e-06,
      "loss": 0.0003,
      "step": 1810
    },
    {
      "epoch": 1.5397631133671743,
      "grad_norm": 0.002435258822515607,
      "learning_rate": 9.740552735476594e-06,
      "loss": 0.0003,
      "step": 1820
    },
    {
      "epoch": 1.548223350253807,
      "grad_norm": 0.32252246141433716,
      "learning_rate": 9.684151156232375e-06,
      "loss": 0.0003,
      "step": 1830
    },
    {
      "epoch": 1.5566835871404399,
      "grad_norm": 0.0023758108727633953,
      "learning_rate": 9.627749576988158e-06,
      "loss": 0.0003,
      "step": 1840
    },
    {
      "epoch": 1.5651438240270727,
      "grad_norm": 0.0023211485240608454,
      "learning_rate": 9.571347997743937e-06,
      "loss": 0.0003,
      "step": 1850
    },
    {
      "epoch": 1.5736040609137056,
      "grad_norm": 0.002314375713467598,
      "learning_rate": 9.514946418499718e-06,
      "loss": 0.0003,
      "step": 1860
    },
    {
      "epoch": 1.5820642978003385,
      "grad_norm": 0.0023043151013553143,
      "learning_rate": 9.4585448392555e-06,
      "loss": 0.0003,
      "step": 1870
    },
    {
      "epoch": 1.5905245346869712,
      "grad_norm": 0.0022787386551499367,
      "learning_rate": 9.402143260011282e-06,
      "loss": 0.0002,
      "step": 1880
    },
    {
      "epoch": 1.598984771573604,
      "grad_norm": 0.0022805132903158665,
      "learning_rate": 9.345741680767063e-06,
      "loss": 0.0003,
      "step": 1890
    },
    {
      "epoch": 1.6074450084602367,
      "grad_norm": 0.0022434191778302193,
      "learning_rate": 9.289340101522844e-06,
      "loss": 0.0003,
      "step": 1900
    },
    {
      "epoch": 1.6159052453468696,
      "grad_norm": 0.0022070412524044514,
      "learning_rate": 9.232938522278625e-06,
      "loss": 0.0002,
      "step": 1910
    },
    {
      "epoch": 1.6243654822335025,
      "grad_norm": 0.002298792591318488,
      "learning_rate": 9.176536943034406e-06,
      "loss": 0.0002,
      "step": 1920
    },
    {
      "epoch": 1.6328257191201354,
      "grad_norm": 0.002347590634599328,
      "learning_rate": 9.120135363790187e-06,
      "loss": 0.0002,
      "step": 1930
    },
    {
      "epoch": 1.6412859560067683,
      "grad_norm": 0.0022417185828089714,
      "learning_rate": 9.063733784545968e-06,
      "loss": 0.0002,
      "step": 1940
    },
    {
      "epoch": 1.649746192893401,
      "grad_norm": 0.002178118098527193,
      "learning_rate": 9.007332205301749e-06,
      "loss": 0.0002,
      "step": 1950
    },
    {
      "epoch": 1.6582064297800339,
      "grad_norm": 0.002204876858741045,
      "learning_rate": 8.95093062605753e-06,
      "loss": 0.0002,
      "step": 1960
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.0021794794593006372,
      "learning_rate": 8.894529046813312e-06,
      "loss": 0.0002,
      "step": 1970
    },
    {
      "epoch": 1.6751269035532994,
      "grad_norm": 0.002448797458782792,
      "learning_rate": 8.838127467569093e-06,
      "loss": 0.0002,
      "step": 1980
    },
    {
      "epoch": 1.6835871404399323,
      "grad_norm": 0.00216574571095407,
      "learning_rate": 8.781725888324873e-06,
      "loss": 0.0002,
      "step": 1990
    },
    {
      "epoch": 1.6920473773265652,
      "grad_norm": 0.0022357029374688864,
      "learning_rate": 8.725324309080655e-06,
      "loss": 0.0002,
      "step": 2000
    },
    {
      "epoch": 1.700507614213198,
      "grad_norm": 0.0021086037158966064,
      "learning_rate": 8.668922729836436e-06,
      "loss": 0.0002,
      "step": 2010
    },
    {
      "epoch": 1.708967851099831,
      "grad_norm": 0.002162441611289978,
      "learning_rate": 8.612521150592217e-06,
      "loss": 0.0002,
      "step": 2020
    },
    {
      "epoch": 1.7174280879864636,
      "grad_norm": 0.0021641228813678026,
      "learning_rate": 8.556119571347998e-06,
      "loss": 0.0011,
      "step": 2030
    },
    {
      "epoch": 1.7258883248730963,
      "grad_norm": 0.0021566457580775023,
      "learning_rate": 8.49971799210378e-06,
      "loss": 0.0002,
      "step": 2040
    },
    {
      "epoch": 1.7343485617597292,
      "grad_norm": 0.0021678991615772247,
      "learning_rate": 8.44331641285956e-06,
      "loss": 0.0002,
      "step": 2050
    },
    {
      "epoch": 1.742808798646362,
      "grad_norm": 0.002119549084454775,
      "learning_rate": 8.386914833615342e-06,
      "loss": 0.0002,
      "step": 2060
    },
    {
      "epoch": 1.751269035532995,
      "grad_norm": 0.002093096962198615,
      "learning_rate": 8.330513254371123e-06,
      "loss": 0.0002,
      "step": 2070
    },
    {
      "epoch": 1.7597292724196278,
      "grad_norm": 0.002090982161462307,
      "learning_rate": 8.274111675126905e-06,
      "loss": 0.0002,
      "step": 2080
    },
    {
      "epoch": 1.7681895093062607,
      "grad_norm": 0.004076903220266104,
      "learning_rate": 8.217710095882685e-06,
      "loss": 0.0002,
      "step": 2090
    },
    {
      "epoch": 1.7766497461928934,
      "grad_norm": 0.0020308117382228374,
      "learning_rate": 8.161308516638467e-06,
      "loss": 0.0002,
      "step": 2100
    },
    {
      "epoch": 1.785109983079526,
      "grad_norm": 0.0021146961953490973,
      "learning_rate": 8.104906937394248e-06,
      "loss": 0.0002,
      "step": 2110
    },
    {
      "epoch": 1.793570219966159,
      "grad_norm": 0.0020670590456575155,
      "learning_rate": 8.048505358150028e-06,
      "loss": 0.0002,
      "step": 2120
    },
    {
      "epoch": 1.8020304568527918,
      "grad_norm": 0.0024381864350289106,
      "learning_rate": 7.99210377890581e-06,
      "loss": 0.0002,
      "step": 2130
    },
    {
      "epoch": 1.8104906937394247,
      "grad_norm": 0.002149208215996623,
      "learning_rate": 7.935702199661591e-06,
      "loss": 0.0008,
      "step": 2140
    },
    {
      "epoch": 1.8189509306260576,
      "grad_norm": 0.004185215570032597,
      "learning_rate": 7.879300620417372e-06,
      "loss": 0.0002,
      "step": 2150
    },
    {
      "epoch": 1.8274111675126905,
      "grad_norm": 0.002098744735121727,
      "learning_rate": 7.822899041173153e-06,
      "loss": 0.0002,
      "step": 2160
    },
    {
      "epoch": 1.8358714043993232,
      "grad_norm": 0.0019923567306250334,
      "learning_rate": 7.766497461928934e-06,
      "loss": 0.0004,
      "step": 2170
    },
    {
      "epoch": 1.844331641285956,
      "grad_norm": 0.002045057248324156,
      "learning_rate": 7.710095882684715e-06,
      "loss": 0.0002,
      "step": 2180
    },
    {
      "epoch": 1.8527918781725887,
      "grad_norm": 0.002222336595878005,
      "learning_rate": 7.653694303440496e-06,
      "loss": 0.0002,
      "step": 2190
    },
    {
      "epoch": 1.8612521150592216,
      "grad_norm": 0.0021004409063607454,
      "learning_rate": 7.597292724196278e-06,
      "loss": 0.0002,
      "step": 2200
    },
    {
      "epoch": 1.8697123519458545,
      "grad_norm": 0.001979230437427759,
      "learning_rate": 7.540891144952059e-06,
      "loss": 0.0002,
      "step": 2210
    },
    {
      "epoch": 1.8781725888324874,
      "grad_norm": 0.0020105023868381977,
      "learning_rate": 7.484489565707841e-06,
      "loss": 0.0002,
      "step": 2220
    },
    {
      "epoch": 1.8866328257191203,
      "grad_norm": 0.0020976634696125984,
      "learning_rate": 7.428087986463621e-06,
      "loss": 0.0002,
      "step": 2230
    },
    {
      "epoch": 1.895093062605753,
      "grad_norm": 0.0020316061563789845,
      "learning_rate": 7.371686407219403e-06,
      "loss": 0.0002,
      "step": 2240
    },
    {
      "epoch": 1.9035532994923858,
      "grad_norm": 0.00200808048248291,
      "learning_rate": 7.315284827975184e-06,
      "loss": 0.0002,
      "step": 2250
    },
    {
      "epoch": 1.9120135363790185,
      "grad_norm": 0.002250692341476679,
      "learning_rate": 7.258883248730964e-06,
      "loss": 0.0002,
      "step": 2260
    },
    {
      "epoch": 1.9204737732656514,
      "grad_norm": 0.0022177433129400015,
      "learning_rate": 7.202481669486746e-06,
      "loss": 0.0007,
      "step": 2270
    },
    {
      "epoch": 1.9289340101522843,
      "grad_norm": 0.0018741836538538337,
      "learning_rate": 7.146080090242527e-06,
      "loss": 0.0002,
      "step": 2280
    },
    {
      "epoch": 1.9373942470389172,
      "grad_norm": 0.002023757202550769,
      "learning_rate": 7.089678510998309e-06,
      "loss": 0.0002,
      "step": 2290
    },
    {
      "epoch": 1.94585448392555,
      "grad_norm": 0.0021521553862839937,
      "learning_rate": 7.033276931754089e-06,
      "loss": 0.0002,
      "step": 2300
    },
    {
      "epoch": 1.9543147208121827,
      "grad_norm": 0.0019342171726748347,
      "learning_rate": 6.976875352509871e-06,
      "loss": 0.0008,
      "step": 2310
    },
    {
      "epoch": 1.9627749576988156,
      "grad_norm": 0.0019481374183669686,
      "learning_rate": 6.920473773265652e-06,
      "loss": 0.0007,
      "step": 2320
    },
    {
      "epoch": 1.9712351945854483,
      "grad_norm": 0.002077361335977912,
      "learning_rate": 6.864072194021433e-06,
      "loss": 0.0011,
      "step": 2330
    },
    {
      "epoch": 1.9796954314720812,
      "grad_norm": 0.0020031894091516733,
      "learning_rate": 6.807670614777214e-06,
      "loss": 0.0003,
      "step": 2340
    },
    {
      "epoch": 1.988155668358714,
      "grad_norm": 0.00219974503852427,
      "learning_rate": 6.751269035532996e-06,
      "loss": 0.0002,
      "step": 2350
    },
    {
      "epoch": 1.996615905245347,
      "grad_norm": 0.002148480387404561,
      "learning_rate": 6.694867456288776e-06,
      "loss": 0.0002,
      "step": 2360
    },
    {
      "epoch": 2.00507614213198,
      "grad_norm": 0.0018948954530060291,
      "learning_rate": 6.638465877044558e-06,
      "loss": 0.0002,
      "step": 2370
    },
    {
      "epoch": 2.0135363790186127,
      "grad_norm": 0.007534192409366369,
      "learning_rate": 6.582064297800339e-06,
      "loss": 0.0002,
      "step": 2380
    },
    {
      "epoch": 2.021996615905245,
      "grad_norm": 0.0018520756857469678,
      "learning_rate": 6.525662718556121e-06,
      "loss": 0.0002,
      "step": 2390
    },
    {
      "epoch": 2.030456852791878,
      "grad_norm": 0.0020236349664628506,
      "learning_rate": 6.469261139311901e-06,
      "loss": 0.0002,
      "step": 2400
    },
    {
      "epoch": 2.038917089678511,
      "grad_norm": 0.0018428469775244594,
      "learning_rate": 6.412859560067682e-06,
      "loss": 0.0002,
      "step": 2410
    },
    {
      "epoch": 2.047377326565144,
      "grad_norm": 0.0018592760898172855,
      "learning_rate": 6.356457980823464e-06,
      "loss": 0.0002,
      "step": 2420
    },
    {
      "epoch": 2.0558375634517767,
      "grad_norm": 0.001955309882760048,
      "learning_rate": 6.300056401579244e-06,
      "loss": 0.0002,
      "step": 2430
    },
    {
      "epoch": 2.0642978003384096,
      "grad_norm": 0.001861874246969819,
      "learning_rate": 6.243654822335026e-06,
      "loss": 0.0002,
      "step": 2440
    },
    {
      "epoch": 2.0727580372250425,
      "grad_norm": 0.0018448787741363049,
      "learning_rate": 6.187253243090807e-06,
      "loss": 0.0002,
      "step": 2450
    },
    {
      "epoch": 2.081218274111675,
      "grad_norm": 0.0018381918780505657,
      "learning_rate": 6.130851663846589e-06,
      "loss": 0.0002,
      "step": 2460
    },
    {
      "epoch": 2.089678510998308,
      "grad_norm": 0.0018074287800118327,
      "learning_rate": 6.074450084602369e-06,
      "loss": 0.0002,
      "step": 2470
    },
    {
      "epoch": 2.0981387478849407,
      "grad_norm": 0.0018625473603606224,
      "learning_rate": 6.018048505358151e-06,
      "loss": 0.0002,
      "step": 2480
    },
    {
      "epoch": 2.1065989847715736,
      "grad_norm": 0.0017794481245800853,
      "learning_rate": 5.961646926113932e-06,
      "loss": 0.0002,
      "step": 2490
    },
    {
      "epoch": 2.1150592216582065,
      "grad_norm": 0.0017721441108733416,
      "learning_rate": 5.905245346869712e-06,
      "loss": 0.0002,
      "step": 2500
    }
  ],
  "logging_steps": 10,
  "max_steps": 3546,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 746156802869100.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}