{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.8460236886632826,
"eval_steps": 500,
"global_step": 1000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008460236886632826,
"grad_norm": 2.4233109951019287,
"learning_rate": 1.9949238578680207e-05,
"loss": 1.3216,
"step": 10
},
{
"epoch": 0.01692047377326565,
"grad_norm": 1.1721662282943726,
"learning_rate": 1.9892836999435986e-05,
"loss": 0.5312,
"step": 20
},
{
"epoch": 0.025380710659898477,
"grad_norm": 0.899889349937439,
"learning_rate": 1.9836435420191765e-05,
"loss": 0.2969,
"step": 30
},
{
"epoch": 0.0338409475465313,
"grad_norm": 0.4674186408519745,
"learning_rate": 1.9780033840947548e-05,
"loss": 0.1564,
"step": 40
},
{
"epoch": 0.04230118443316413,
"grad_norm": 0.9013752937316895,
"learning_rate": 1.972363226170333e-05,
"loss": 0.0966,
"step": 50
},
{
"epoch": 0.050761421319796954,
"grad_norm": 0.364164799451828,
"learning_rate": 1.966723068245911e-05,
"loss": 0.0516,
"step": 60
},
{
"epoch": 0.05922165820642978,
"grad_norm": 0.22717136144638062,
"learning_rate": 1.9610829103214893e-05,
"loss": 0.0307,
"step": 70
},
{
"epoch": 0.0676818950930626,
"grad_norm": 0.09149003773927689,
"learning_rate": 1.9554427523970672e-05,
"loss": 0.0182,
"step": 80
},
{
"epoch": 0.07614213197969544,
"grad_norm": 0.12942063808441162,
"learning_rate": 1.949802594472645e-05,
"loss": 0.017,
"step": 90
},
{
"epoch": 0.08460236886632826,
"grad_norm": 0.27440059185028076,
"learning_rate": 1.9441624365482234e-05,
"loss": 0.0113,
"step": 100
},
{
"epoch": 0.09306260575296109,
"grad_norm": 0.5005059242248535,
"learning_rate": 1.9385222786238017e-05,
"loss": 0.0082,
"step": 110
},
{
"epoch": 0.10152284263959391,
"grad_norm": 0.19820384681224823,
"learning_rate": 1.9328821206993796e-05,
"loss": 0.0057,
"step": 120
},
{
"epoch": 0.10998307952622674,
"grad_norm": 0.03431914001703262,
"learning_rate": 1.927241962774958e-05,
"loss": 0.0055,
"step": 130
},
{
"epoch": 0.11844331641285956,
"grad_norm": 0.4414117634296417,
"learning_rate": 1.921601804850536e-05,
"loss": 0.0063,
"step": 140
},
{
"epoch": 0.12690355329949238,
"grad_norm": 0.9055352807044983,
"learning_rate": 1.915961646926114e-05,
"loss": 0.0048,
"step": 150
},
{
"epoch": 0.1353637901861252,
"grad_norm": 0.036724768579006195,
"learning_rate": 1.910321489001692e-05,
"loss": 0.0035,
"step": 160
},
{
"epoch": 0.14382402707275804,
"grad_norm": 0.0888664722442627,
"learning_rate": 1.9046813310772703e-05,
"loss": 0.0033,
"step": 170
},
{
"epoch": 0.15228426395939088,
"grad_norm": 0.019098607823252678,
"learning_rate": 1.8990411731528486e-05,
"loss": 0.0029,
"step": 180
},
{
"epoch": 0.16074450084602368,
"grad_norm": 0.0171552412211895,
"learning_rate": 1.8934010152284265e-05,
"loss": 0.0029,
"step": 190
},
{
"epoch": 0.1692047377326565,
"grad_norm": 0.01885647512972355,
"learning_rate": 1.8877608573040048e-05,
"loss": 0.0028,
"step": 200
},
{
"epoch": 0.17766497461928935,
"grad_norm": 0.01599389687180519,
"learning_rate": 1.8821206993795827e-05,
"loss": 0.0022,
"step": 210
},
{
"epoch": 0.18612521150592218,
"grad_norm": 0.017242038622498512,
"learning_rate": 1.876480541455161e-05,
"loss": 0.0023,
"step": 220
},
{
"epoch": 0.19458544839255498,
"grad_norm": 0.5941590666770935,
"learning_rate": 1.870840383530739e-05,
"loss": 0.0031,
"step": 230
},
{
"epoch": 0.20304568527918782,
"grad_norm": 0.4872148931026459,
"learning_rate": 1.8652002256063172e-05,
"loss": 0.0027,
"step": 240
},
{
"epoch": 0.21150592216582065,
"grad_norm": 0.014173777773976326,
"learning_rate": 1.859560067681895e-05,
"loss": 0.0021,
"step": 250
},
{
"epoch": 0.21996615905245348,
"grad_norm": 0.024433018639683723,
"learning_rate": 1.8539199097574734e-05,
"loss": 0.0017,
"step": 260
},
{
"epoch": 0.22842639593908629,
"grad_norm": 0.017857005819678307,
"learning_rate": 1.8482797518330516e-05,
"loss": 0.0017,
"step": 270
},
{
"epoch": 0.23688663282571912,
"grad_norm": 0.014704135246574879,
"learning_rate": 1.8426395939086296e-05,
"loss": 0.0021,
"step": 280
},
{
"epoch": 0.24534686971235195,
"grad_norm": 0.022361995652318,
"learning_rate": 1.8369994359842075e-05,
"loss": 0.0016,
"step": 290
},
{
"epoch": 0.25380710659898476,
"grad_norm": 0.01112055778503418,
"learning_rate": 1.8313592780597858e-05,
"loss": 0.0015,
"step": 300
},
{
"epoch": 0.2622673434856176,
"grad_norm": 0.010048450902104378,
"learning_rate": 1.825719120135364e-05,
"loss": 0.0015,
"step": 310
},
{
"epoch": 0.2707275803722504,
"grad_norm": 0.012935510836541653,
"learning_rate": 1.820078962210942e-05,
"loss": 0.0014,
"step": 320
},
{
"epoch": 0.27918781725888325,
"grad_norm": 0.010541570372879505,
"learning_rate": 1.8144388042865203e-05,
"loss": 0.0013,
"step": 330
},
{
"epoch": 0.2876480541455161,
"grad_norm": 0.009258048608899117,
"learning_rate": 1.8087986463620982e-05,
"loss": 0.0012,
"step": 340
},
{
"epoch": 0.2961082910321489,
"grad_norm": 0.009454768151044846,
"learning_rate": 1.8031584884376765e-05,
"loss": 0.0015,
"step": 350
},
{
"epoch": 0.30456852791878175,
"grad_norm": 0.018366724252700806,
"learning_rate": 1.7975183305132544e-05,
"loss": 0.0012,
"step": 360
},
{
"epoch": 0.3130287648054145,
"grad_norm": 0.009252658113837242,
"learning_rate": 1.7918781725888327e-05,
"loss": 0.0011,
"step": 370
},
{
"epoch": 0.32148900169204736,
"grad_norm": 0.1430797576904297,
"learning_rate": 1.7862380146644106e-05,
"loss": 0.0018,
"step": 380
},
{
"epoch": 0.3299492385786802,
"grad_norm": 0.009182159788906574,
"learning_rate": 1.780597856739989e-05,
"loss": 0.0018,
"step": 390
},
{
"epoch": 0.338409475465313,
"grad_norm": 0.00843009352684021,
"learning_rate": 1.774957698815567e-05,
"loss": 0.0017,
"step": 400
},
{
"epoch": 0.34686971235194586,
"grad_norm": 0.008176930248737335,
"learning_rate": 1.769317540891145e-05,
"loss": 0.0013,
"step": 410
},
{
"epoch": 0.3553299492385787,
"grad_norm": 0.008539380505681038,
"learning_rate": 1.7636773829667233e-05,
"loss": 0.001,
"step": 420
},
{
"epoch": 0.3637901861252115,
"grad_norm": 0.008693977259099483,
"learning_rate": 1.7580372250423013e-05,
"loss": 0.0014,
"step": 430
},
{
"epoch": 0.37225042301184436,
"grad_norm": 0.009141940623521805,
"learning_rate": 1.7523970671178795e-05,
"loss": 0.0012,
"step": 440
},
{
"epoch": 0.38071065989847713,
"grad_norm": 0.009133282117545605,
"learning_rate": 1.7467569091934575e-05,
"loss": 0.0015,
"step": 450
},
{
"epoch": 0.38917089678510997,
"grad_norm": 0.5799979567527771,
"learning_rate": 1.7411167512690357e-05,
"loss": 0.0013,
"step": 460
},
{
"epoch": 0.3976311336717428,
"grad_norm": 0.04120843857526779,
"learning_rate": 1.7354765933446137e-05,
"loss": 0.0011,
"step": 470
},
{
"epoch": 0.40609137055837563,
"grad_norm": 0.01276948768645525,
"learning_rate": 1.729836435420192e-05,
"loss": 0.0012,
"step": 480
},
{
"epoch": 0.41455160744500846,
"grad_norm": 0.0076572224497795105,
"learning_rate": 1.7241962774957702e-05,
"loss": 0.0014,
"step": 490
},
{
"epoch": 0.4230118443316413,
"grad_norm": 0.008293437771499157,
"learning_rate": 1.718556119571348e-05,
"loss": 0.0009,
"step": 500
},
{
"epoch": 0.43147208121827413,
"grad_norm": 0.0068985214456915855,
"learning_rate": 1.712915961646926e-05,
"loss": 0.0009,
"step": 510
},
{
"epoch": 0.43993231810490696,
"grad_norm": 0.029666345566511154,
"learning_rate": 1.7072758037225043e-05,
"loss": 0.0009,
"step": 520
},
{
"epoch": 0.44839255499153974,
"grad_norm": 0.014065027236938477,
"learning_rate": 1.7016356457980826e-05,
"loss": 0.0008,
"step": 530
},
{
"epoch": 0.45685279187817257,
"grad_norm": 0.00650499714538455,
"learning_rate": 1.6959954878736606e-05,
"loss": 0.0008,
"step": 540
},
{
"epoch": 0.4653130287648054,
"grad_norm": 0.0065047661773860455,
"learning_rate": 1.6903553299492388e-05,
"loss": 0.0008,
"step": 550
},
{
"epoch": 0.47377326565143824,
"grad_norm": 0.0071571446023881435,
"learning_rate": 1.684715172024817e-05,
"loss": 0.0008,
"step": 560
},
{
"epoch": 0.48223350253807107,
"grad_norm": 0.005875805858522654,
"learning_rate": 1.679075014100395e-05,
"loss": 0.0008,
"step": 570
},
{
"epoch": 0.4906937394247039,
"grad_norm": 0.005848041269928217,
"learning_rate": 1.673434856175973e-05,
"loss": 0.0007,
"step": 580
},
{
"epoch": 0.49915397631133673,
"grad_norm": 0.006032396107912064,
"learning_rate": 1.6677946982515512e-05,
"loss": 0.0007,
"step": 590
},
{
"epoch": 0.5076142131979695,
"grad_norm": 0.005858593620359898,
"learning_rate": 1.662154540327129e-05,
"loss": 0.0007,
"step": 600
},
{
"epoch": 0.5160744500846024,
"grad_norm": 0.006055026315152645,
"learning_rate": 1.6565143824027074e-05,
"loss": 0.0007,
"step": 610
},
{
"epoch": 0.5245346869712352,
"grad_norm": 0.0055991546250879765,
"learning_rate": 1.6508742244782857e-05,
"loss": 0.0039,
"step": 620
},
{
"epoch": 0.5329949238578681,
"grad_norm": 0.006043154280632734,
"learning_rate": 1.6452340665538636e-05,
"loss": 0.0009,
"step": 630
},
{
"epoch": 0.5414551607445008,
"grad_norm": 0.006406570319086313,
"learning_rate": 1.6395939086294416e-05,
"loss": 0.0008,
"step": 640
},
{
"epoch": 0.5499153976311336,
"grad_norm": 0.006317532621324062,
"learning_rate": 1.63395375070502e-05,
"loss": 0.0007,
"step": 650
},
{
"epoch": 0.5583756345177665,
"grad_norm": 0.0260737594217062,
"learning_rate": 1.628313592780598e-05,
"loss": 0.0007,
"step": 660
},
{
"epoch": 0.5668358714043993,
"grad_norm": 0.005516720470041037,
"learning_rate": 1.622673434856176e-05,
"loss": 0.0007,
"step": 670
},
{
"epoch": 0.5752961082910322,
"grad_norm": 0.005611425265669823,
"learning_rate": 1.6170332769317543e-05,
"loss": 0.0006,
"step": 680
},
{
"epoch": 0.583756345177665,
"grad_norm": 0.005698953289538622,
"learning_rate": 1.6113931190073326e-05,
"loss": 0.0006,
"step": 690
},
{
"epoch": 0.5922165820642978,
"grad_norm": 0.005045793950557709,
"learning_rate": 1.6057529610829105e-05,
"loss": 0.0006,
"step": 700
},
{
"epoch": 0.6006768189509306,
"grad_norm": 0.005404220428317785,
"learning_rate": 1.6001128031584884e-05,
"loss": 0.0007,
"step": 710
},
{
"epoch": 0.6091370558375635,
"grad_norm": 0.005528539884835482,
"learning_rate": 1.5944726452340667e-05,
"loss": 0.0007,
"step": 720
},
{
"epoch": 0.6175972927241963,
"grad_norm": 0.0052613625302910805,
"learning_rate": 1.5888324873096446e-05,
"loss": 0.0006,
"step": 730
},
{
"epoch": 0.626057529610829,
"grad_norm": 0.0050328257493674755,
"learning_rate": 1.583192329385223e-05,
"loss": 0.0006,
"step": 740
},
{
"epoch": 0.6345177664974619,
"grad_norm": 0.005106599070131779,
"learning_rate": 1.5775521714608012e-05,
"loss": 0.0006,
"step": 750
},
{
"epoch": 0.6429780033840947,
"grad_norm": 0.004836504813283682,
"learning_rate": 1.571912013536379e-05,
"loss": 0.0006,
"step": 760
},
{
"epoch": 0.6514382402707276,
"grad_norm": 0.004986999090760946,
"learning_rate": 1.566271855611957e-05,
"loss": 0.0006,
"step": 770
},
{
"epoch": 0.6598984771573604,
"grad_norm": 0.005235906690359116,
"learning_rate": 1.5606316976875353e-05,
"loss": 0.0005,
"step": 780
},
{
"epoch": 0.6683587140439933,
"grad_norm": 0.4625732898712158,
"learning_rate": 1.5549915397631136e-05,
"loss": 0.0009,
"step": 790
},
{
"epoch": 0.676818950930626,
"grad_norm": 0.004936546087265015,
"learning_rate": 1.5493513818386915e-05,
"loss": 0.0006,
"step": 800
},
{
"epoch": 0.6852791878172588,
"grad_norm": 0.004956043791025877,
"learning_rate": 1.5437112239142698e-05,
"loss": 0.0005,
"step": 810
},
{
"epoch": 0.6937394247038917,
"grad_norm": 0.005084918346256018,
"learning_rate": 1.538071065989848e-05,
"loss": 0.0005,
"step": 820
},
{
"epoch": 0.7021996615905245,
"grad_norm": 0.006050592288374901,
"learning_rate": 1.532430908065426e-05,
"loss": 0.0005,
"step": 830
},
{
"epoch": 0.7106598984771574,
"grad_norm": 0.0045923274010419846,
"learning_rate": 1.526790750141004e-05,
"loss": 0.0005,
"step": 840
},
{
"epoch": 0.7191201353637902,
"grad_norm": 0.004406214691698551,
"learning_rate": 1.5211505922165822e-05,
"loss": 0.0005,
"step": 850
},
{
"epoch": 0.727580372250423,
"grad_norm": 0.00449965288862586,
"learning_rate": 1.5155104342921603e-05,
"loss": 0.0005,
"step": 860
},
{
"epoch": 0.7360406091370558,
"grad_norm": 0.00427517294883728,
"learning_rate": 1.5098702763677384e-05,
"loss": 0.0006,
"step": 870
},
{
"epoch": 0.7445008460236887,
"grad_norm": 0.006013456266373396,
"learning_rate": 1.5042301184433165e-05,
"loss": 0.0005,
"step": 880
},
{
"epoch": 0.7529610829103215,
"grad_norm": 0.004456107504665852,
"learning_rate": 1.4985899605188948e-05,
"loss": 0.0005,
"step": 890
},
{
"epoch": 0.7614213197969543,
"grad_norm": 0.004223175812512636,
"learning_rate": 1.4929498025944729e-05,
"loss": 0.0005,
"step": 900
},
{
"epoch": 0.7698815566835872,
"grad_norm": 0.004154487047344446,
"learning_rate": 1.4873096446700508e-05,
"loss": 0.0005,
"step": 910
},
{
"epoch": 0.7783417935702199,
"grad_norm": 0.007055574096739292,
"learning_rate": 1.4816694867456289e-05,
"loss": 0.0005,
"step": 920
},
{
"epoch": 0.7868020304568528,
"grad_norm": 0.005269153974950314,
"learning_rate": 1.476029328821207e-05,
"loss": 0.0005,
"step": 930
},
{
"epoch": 0.7952622673434856,
"grad_norm": 0.004041856154799461,
"learning_rate": 1.4703891708967853e-05,
"loss": 0.0005,
"step": 940
},
{
"epoch": 0.8037225042301185,
"grad_norm": 0.003965588286519051,
"learning_rate": 1.4647490129723634e-05,
"loss": 0.0005,
"step": 950
},
{
"epoch": 0.8121827411167513,
"grad_norm": 0.005661304574459791,
"learning_rate": 1.4591088550479415e-05,
"loss": 0.0005,
"step": 960
},
{
"epoch": 0.8206429780033841,
"grad_norm": 0.0039055896922945976,
"learning_rate": 1.4534686971235198e-05,
"loss": 0.0011,
"step": 970
},
{
"epoch": 0.8291032148900169,
"grad_norm": 1.8310741186141968,
"learning_rate": 1.4478285391990977e-05,
"loss": 0.001,
"step": 980
},
{
"epoch": 0.8375634517766497,
"grad_norm": 0.005107107572257519,
"learning_rate": 1.4421883812746758e-05,
"loss": 0.0008,
"step": 990
},
{
"epoch": 0.8460236886632826,
"grad_norm": 0.005387498531490564,
"learning_rate": 1.4365482233502539e-05,
"loss": 0.0005,
"step": 1000
}
],
"logging_steps": 10,
"max_steps": 3546,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 298645454467584.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}