{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6913836314925245,
  "global_step": 40000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9913577046063434e-05,
      "loss": 2.9531,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.982715409212687e-05,
      "loss": 2.7985,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.97407311381903e-05,
      "loss": 2.7573,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.965430818425374e-05,
      "loss": 2.7173,
      "step": 2000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.956788523031717e-05,
      "loss": 2.7035,
      "step": 2500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.948146227638061e-05,
      "loss": 2.6966,
      "step": 3000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.939503932244404e-05,
      "loss": 2.6763,
      "step": 3500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.930861636850748e-05,
      "loss": 2.6809,
      "step": 4000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.922219341457091e-05,
      "loss": 2.6415,
      "step": 4500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.913577046063435e-05,
      "loss": 2.6747,
      "step": 5000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.904934750669778e-05,
      "loss": 2.6637,
      "step": 5500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.896292455276122e-05,
      "loss": 2.6527,
      "step": 6000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.887650159882465e-05,
      "loss": 2.6423,
      "step": 6500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.879007864488808e-05,
      "loss": 2.6331,
      "step": 7000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.8703655690951524e-05,
      "loss": 2.5868,
      "step": 7500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.8617232737014955e-05,
      "loss": 2.5901,
      "step": 8000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.853080978307839e-05,
      "loss": 2.5846,
      "step": 8500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.8444386829141824e-05,
      "loss": 2.6172,
      "step": 9000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.835796387520526e-05,
      "loss": 2.6035,
      "step": 9500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.827154092126869e-05,
      "loss": 2.6213,
      "step": 10000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.8185117967332124e-05,
      "loss": 2.5827,
      "step": 10500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.809869501339556e-05,
      "loss": 2.606,
      "step": 11000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.801227205945899e-05,
      "loss": 2.5827,
      "step": 11500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.792584910552243e-05,
      "loss": 2.5601,
      "step": 12000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.783942615158586e-05,
      "loss": 2.554,
      "step": 12500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.77530031976493e-05,
      "loss": 2.5501,
      "step": 13000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.766658024371273e-05,
      "loss": 2.5841,
      "step": 13500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.758015728977617e-05,
      "loss": 2.547,
      "step": 14000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.74937343358396e-05,
      "loss": 2.5823,
      "step": 14500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.740731138190304e-05,
      "loss": 2.5408,
      "step": 15000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.732088842796647e-05,
      "loss": 2.5678,
      "step": 15500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.72344654740299e-05,
      "loss": 2.5628,
      "step": 16000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.714804252009334e-05,
      "loss": 2.5549,
      "step": 16500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.706161956615677e-05,
      "loss": 2.5335,
      "step": 17000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.697519661222021e-05,
      "loss": 2.5287,
      "step": 17500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.688877365828364e-05,
      "loss": 2.5478,
      "step": 18000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.680235070434708e-05,
      "loss": 2.5501,
      "step": 18500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.671592775041051e-05,
      "loss": 2.5335,
      "step": 19000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.6629504796473946e-05,
      "loss": 2.5057,
      "step": 19500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.654308184253738e-05,
      "loss": 2.5316,
      "step": 20000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.6456658888600815e-05,
      "loss": 2.5584,
      "step": 20500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.6370235934664246e-05,
      "loss": 2.5184,
      "step": 21000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.6283812980727684e-05,
      "loss": 2.5301,
      "step": 21500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.6197390026791115e-05,
      "loss": 2.5337,
      "step": 22000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.611096707285455e-05,
      "loss": 2.5136,
      "step": 22500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.602454411891799e-05,
      "loss": 2.4641,
      "step": 23000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.593812116498142e-05,
      "loss": 2.5473,
      "step": 23500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.585169821104486e-05,
      "loss": 2.5219,
      "step": 24000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.576527525710829e-05,
      "loss": 2.5111,
      "step": 24500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.567885230317173e-05,
      "loss": 2.5045,
      "step": 25000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.559242934923516e-05,
      "loss": 2.4803,
      "step": 25500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.550600639529859e-05,
      "loss": 2.478,
      "step": 26000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.541958344136203e-05,
      "loss": 2.502,
      "step": 26500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.533316048742546e-05,
      "loss": 2.5121,
      "step": 27000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.52467375334889e-05,
      "loss": 2.4963,
      "step": 27500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.516031457955233e-05,
      "loss": 2.4793,
      "step": 28000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.507389162561577e-05,
      "loss": 2.5068,
      "step": 28500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.49874686716792e-05,
      "loss": 2.5034,
      "step": 29000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.490104571774264e-05,
      "loss": 2.4744,
      "step": 29500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.481462276380607e-05,
      "loss": 2.4565,
      "step": 30000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.4728199809869506e-05,
      "loss": 2.491,
      "step": 30500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.464177685593294e-05,
      "loss": 2.5165,
      "step": 31000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.4555353901996375e-05,
      "loss": 2.4936,
      "step": 31500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.4468930948059806e-05,
      "loss": 2.4991,
      "step": 32000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.438250799412324e-05,
      "loss": 2.454,
      "step": 32500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.4296085040186675e-05,
      "loss": 2.4699,
      "step": 33000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.4209662086250106e-05,
      "loss": 2.4951,
      "step": 33500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.4123239132313544e-05,
      "loss": 2.4536,
      "step": 34000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.4036816178376975e-05,
      "loss": 2.4745,
      "step": 34500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.395039322444041e-05,
      "loss": 2.4752,
      "step": 35000
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.3863970270503844e-05,
      "loss": 2.4757,
      "step": 35500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.377754731656728e-05,
      "loss": 2.5043,
      "step": 36000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.3691124362630714e-05,
      "loss": 2.4513,
      "step": 36500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.360470140869415e-05,
      "loss": 2.4683,
      "step": 37000
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.351827845475759e-05,
      "loss": 2.4723,
      "step": 37500
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.343185550082102e-05,
      "loss": 2.4607,
      "step": 38000
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.334543254688446e-05,
      "loss": 2.4937,
      "step": 38500
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.325900959294789e-05,
      "loss": 2.4491,
      "step": 39000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.317258663901133e-05,
      "loss": 2.4476,
      "step": 39500
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.308616368507476e-05,
      "loss": 2.479,
      "step": 40000
    }
  ],
  "max_steps": 289275,
  "num_train_epochs": 5,
  "total_flos": 3.2856743048380416e+16,
  "trial_name": null,
  "trial_params": null
}