{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.2770083102493075,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002770083102493075,
      "grad_norm": 15.603601455688477,
      "learning_rate": 7.5e-06,
      "loss": 1.4608,
      "step": 1
    },
    {
      "epoch": 0.00554016620498615,
      "grad_norm": 14.760490417480469,
      "learning_rate": 1.5e-05,
      "loss": 1.428,
      "step": 2
    },
    {
      "epoch": 0.008310249307479225,
      "grad_norm": 19.810142517089844,
      "learning_rate": 2.25e-05,
      "loss": 1.2831,
      "step": 3
    },
    {
      "epoch": 0.0110803324099723,
      "grad_norm": 6.9140729904174805,
      "learning_rate": 3e-05,
      "loss": 1.0688,
      "step": 4
    },
    {
      "epoch": 0.013850415512465374,
      "grad_norm": 6.4214253425598145,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.2067,
      "step": 5
    },
    {
      "epoch": 0.01662049861495845,
      "grad_norm": 11.87337589263916,
      "learning_rate": 4.5e-05,
      "loss": 1.4816,
      "step": 6
    },
    {
      "epoch": 0.019390581717451522,
      "grad_norm": 6.669366359710693,
      "learning_rate": 5.25e-05,
      "loss": 1.1287,
      "step": 7
    },
    {
      "epoch": 0.0221606648199446,
      "grad_norm": 8.616178512573242,
      "learning_rate": 6e-05,
      "loss": 1.2203,
      "step": 8
    },
    {
      "epoch": 0.024930747922437674,
      "grad_norm": 9.265680313110352,
      "learning_rate": 5.999881193903433e-05,
      "loss": 1.1897,
      "step": 9
    },
    {
      "epoch": 0.027700831024930747,
      "grad_norm": 5.8777852058410645,
      "learning_rate": 5.999524785023657e-05,
      "loss": 1.0788,
      "step": 10
    },
    {
      "epoch": 0.030470914127423823,
      "grad_norm": 5.538949012756348,
      "learning_rate": 5.998930801589704e-05,
      "loss": 1.0736,
      "step": 11
    },
    {
      "epoch": 0.0332409972299169,
      "grad_norm": 5.686643123626709,
      "learning_rate": 5.9980992906474764e-05,
      "loss": 1.1573,
      "step": 12
    },
    {
      "epoch": 0.036011080332409975,
      "grad_norm": 6.211599349975586,
      "learning_rate": 5.9970303180560196e-05,
      "loss": 1.1618,
      "step": 13
    },
    {
      "epoch": 0.038781163434903045,
      "grad_norm": 5.079687595367432,
      "learning_rate": 5.995723968482309e-05,
      "loss": 0.9948,
      "step": 14
    },
    {
      "epoch": 0.04155124653739612,
      "grad_norm": 5.150979042053223,
      "learning_rate": 5.994180345394539e-05,
      "loss": 1.0328,
      "step": 15
    },
    {
      "epoch": 0.0443213296398892,
      "grad_norm": 5.891695499420166,
      "learning_rate": 5.9923995710539324e-05,
      "loss": 1.1854,
      "step": 16
    },
    {
      "epoch": 0.04709141274238227,
      "grad_norm": 5.33378791809082,
      "learning_rate": 5.990381786505055e-05,
      "loss": 1.0424,
      "step": 17
    },
    {
      "epoch": 0.04986149584487535,
      "grad_norm": 4.58151912689209,
      "learning_rate": 5.988127151564644e-05,
      "loss": 1.0392,
      "step": 18
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 5.48449182510376,
      "learning_rate": 5.9856358448089506e-05,
      "loss": 1.147,
      "step": 19
    },
    {
      "epoch": 0.055401662049861494,
      "grad_norm": 5.061849117279053,
      "learning_rate": 5.9829080635595944e-05,
      "loss": 1.1072,
      "step": 20
    },
    {
      "epoch": 0.05817174515235457,
      "grad_norm": 4.828271389007568,
      "learning_rate": 5.979944023867938e-05,
      "loss": 1.1132,
      "step": 21
    },
    {
      "epoch": 0.060941828254847646,
      "grad_norm": 5.035257816314697,
      "learning_rate": 5.976743960497973e-05,
      "loss": 1.0272,
      "step": 22
    },
    {
      "epoch": 0.06371191135734072,
      "grad_norm": 5.55271577835083,
      "learning_rate": 5.973308126907723e-05,
      "loss": 1.1674,
      "step": 23
    },
    {
      "epoch": 0.0664819944598338,
      "grad_norm": 4.926880836486816,
      "learning_rate": 5.969636795229172e-05,
      "loss": 1.0765,
      "step": 24
    },
    {
      "epoch": 0.06925207756232687,
      "grad_norm": 5.437366962432861,
      "learning_rate": 5.965730256246713e-05,
      "loss": 1.1344,
      "step": 25
    },
    {
      "epoch": 0.07202216066481995,
      "grad_norm": 4.4522786140441895,
      "learning_rate": 5.9615888193741094e-05,
      "loss": 0.9976,
      "step": 26
    },
    {
      "epoch": 0.07479224376731301,
      "grad_norm": 5.204329967498779,
      "learning_rate": 5.9572128126299946e-05,
      "loss": 1.1298,
      "step": 27
    },
    {
      "epoch": 0.07756232686980609,
      "grad_norm": 5.264033317565918,
      "learning_rate": 5.9526025826118885e-05,
      "loss": 1.0428,
      "step": 28
    },
    {
      "epoch": 0.08033240997229917,
      "grad_norm": 5.672830581665039,
      "learning_rate": 5.947758494468746e-05,
      "loss": 1.1636,
      "step": 29
    },
    {
      "epoch": 0.08310249307479224,
      "grad_norm": 5.798141002655029,
      "learning_rate": 5.942680931872036e-05,
      "loss": 1.1112,
      "step": 30
    },
    {
      "epoch": 0.08587257617728532,
      "grad_norm": 4.760410308837891,
      "learning_rate": 5.937370296985354e-05,
      "loss": 1.0444,
      "step": 31
    },
    {
      "epoch": 0.0886426592797784,
      "grad_norm": 5.4676313400268555,
      "learning_rate": 5.931827010432566e-05,
      "loss": 1.0957,
      "step": 32
    },
    {
      "epoch": 0.09141274238227147,
      "grad_norm": 4.983479976654053,
      "learning_rate": 5.9260515112644995e-05,
      "loss": 1.0734,
      "step": 33
    },
    {
      "epoch": 0.09418282548476455,
      "grad_norm": 5.242843151092529,
      "learning_rate": 5.9200442569241606e-05,
      "loss": 1.0879,
      "step": 34
    },
    {
      "epoch": 0.09695290858725762,
      "grad_norm": 5.12119722366333,
      "learning_rate": 5.9138057232105084e-05,
      "loss": 1.0841,
      "step": 35
    },
    {
      "epoch": 0.0997229916897507,
      "grad_norm": 5.087031841278076,
      "learning_rate": 5.9073364042407705e-05,
      "loss": 1.0343,
      "step": 36
    },
    {
      "epoch": 0.10249307479224377,
      "grad_norm": 5.226377487182617,
      "learning_rate": 5.900636812411301e-05,
      "loss": 1.0775,
      "step": 37
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 5.388728141784668,
      "learning_rate": 5.893707478357005e-05,
      "loss": 1.0439,
      "step": 38
    },
    {
      "epoch": 0.10803324099722991,
      "grad_norm": 5.6382951736450195,
      "learning_rate": 5.886548950909301e-05,
      "loss": 1.096,
      "step": 39
    },
    {
      "epoch": 0.11080332409972299,
      "grad_norm": 5.046523094177246,
      "learning_rate": 5.879161797052658e-05,
      "loss": 0.9928,
      "step": 40
    },
    {
      "epoch": 0.11357340720221606,
      "grad_norm": 5.98545503616333,
      "learning_rate": 5.8715466018796865e-05,
      "loss": 1.1639,
      "step": 41
    },
    {
      "epoch": 0.11634349030470914,
      "grad_norm": 5.064982891082764,
      "learning_rate": 5.863703968544795e-05,
      "loss": 0.9878,
      "step": 42
    },
    {
      "epoch": 0.11911357340720222,
      "grad_norm": 5.36715841293335,
      "learning_rate": 5.8556345182164176e-05,
      "loss": 1.056,
      "step": 43
    },
    {
      "epoch": 0.12188365650969529,
      "grad_norm": 4.7610368728637695,
      "learning_rate": 5.8473388900278206e-05,
      "loss": 0.9625,
      "step": 44
    },
    {
      "epoch": 0.12465373961218837,
      "grad_norm": 5.5188889503479,
      "learning_rate": 5.83881774102647e-05,
      "loss": 1.0828,
      "step": 45
    },
    {
      "epoch": 0.12742382271468145,
      "grad_norm": 5.703125476837158,
      "learning_rate": 5.8300717461220027e-05,
      "loss": 1.0632,
      "step": 46
    },
    {
      "epoch": 0.13019390581717452,
      "grad_norm": 5.264132499694824,
      "learning_rate": 5.821101598032759e-05,
      "loss": 1.0057,
      "step": 47
    },
    {
      "epoch": 0.1329639889196676,
      "grad_norm": 6.05692720413208,
      "learning_rate": 5.811908007230929e-05,
      "loss": 1.0565,
      "step": 48
    },
    {
      "epoch": 0.13573407202216067,
      "grad_norm": 4.773650169372559,
      "learning_rate": 5.802491701886268e-05,
      "loss": 0.9764,
      "step": 49
    },
    {
      "epoch": 0.13850415512465375,
      "grad_norm": 5.461302757263184,
      "learning_rate": 5.792853427808431e-05,
      "loss": 1.0002,
      "step": 50
    },
    {
      "epoch": 0.14127423822714683,
      "grad_norm": 5.83071231842041,
      "learning_rate": 5.7829939483878996e-05,
      "loss": 1.0353,
      "step": 51
    },
    {
      "epoch": 0.1440443213296399,
      "grad_norm": 5.715704917907715,
      "learning_rate": 5.772914044535516e-05,
      "loss": 0.985,
      "step": 52
    },
    {
      "epoch": 0.14681440443213298,
      "grad_norm": 5.309004306793213,
      "learning_rate": 5.762614514620634e-05,
      "loss": 1.0106,
      "step": 53
    },
    {
      "epoch": 0.14958448753462603,
      "grad_norm": 5.693915843963623,
      "learning_rate": 5.752096174407884e-05,
      "loss": 1.0049,
      "step": 54
    },
    {
      "epoch": 0.1523545706371191,
      "grad_norm": 5.644995212554932,
      "learning_rate": 5.741359856992561e-05,
      "loss": 1.0411,
      "step": 55
    },
    {
      "epoch": 0.15512465373961218,
      "grad_norm": 5.765170574188232,
      "learning_rate": 5.73040641273464e-05,
      "loss": 1.0039,
      "step": 56
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 5.643161296844482,
      "learning_rate": 5.719236709191428e-05,
      "loss": 1.0173,
      "step": 57
    },
    {
      "epoch": 0.16066481994459833,
      "grad_norm": 5.821686267852783,
      "learning_rate": 5.707851631048841e-05,
      "loss": 0.9725,
      "step": 58
    },
    {
      "epoch": 0.1634349030470914,
      "grad_norm": 6.034667015075684,
      "learning_rate": 5.6962520800513414e-05,
      "loss": 1.0381,
      "step": 59
    },
    {
      "epoch": 0.16620498614958448,
      "grad_norm": 5.336913585662842,
      "learning_rate": 5.6844389749305136e-05,
      "loss": 0.9266,
      "step": 60
    },
    {
      "epoch": 0.16897506925207756,
      "grad_norm": 5.65543270111084,
      "learning_rate": 5.672413251332297e-05,
      "loss": 0.9768,
      "step": 61
    },
    {
      "epoch": 0.17174515235457063,
      "grad_norm": 5.3824615478515625,
      "learning_rate": 5.6601758617428766e-05,
      "loss": 0.9276,
      "step": 62
    },
    {
      "epoch": 0.1745152354570637,
      "grad_norm": 5.195915222167969,
      "learning_rate": 5.647727775413245e-05,
      "loss": 0.9332,
      "step": 63
    },
    {
      "epoch": 0.1772853185595568,
      "grad_norm": 5.240732669830322,
      "learning_rate": 5.6350699782824346e-05,
      "loss": 0.9012,
      "step": 64
    },
    {
      "epoch": 0.18005540166204986,
      "grad_norm": 5.112541675567627,
      "learning_rate": 5.622203472899423e-05,
      "loss": 0.9211,
      "step": 65
    },
    {
      "epoch": 0.18282548476454294,
      "grad_norm": 6.605805397033691,
      "learning_rate": 5.609129278343731e-05,
      "loss": 0.9847,
      "step": 66
    },
    {
      "epoch": 0.18559556786703602,
      "grad_norm": 5.1862568855285645,
      "learning_rate": 5.595848430144705e-05,
      "loss": 0.9064,
      "step": 67
    },
    {
      "epoch": 0.1883656509695291,
      "grad_norm": 6.03863525390625,
      "learning_rate": 5.582361980199504e-05,
      "loss": 0.9405,
      "step": 68
    },
    {
      "epoch": 0.19113573407202217,
      "grad_norm": 6.4853901863098145,
      "learning_rate": 5.568670996689773e-05,
      "loss": 0.9676,
      "step": 69
    },
    {
      "epoch": 0.19390581717451524,
      "grad_norm": 5.415260314941406,
      "learning_rate": 5.554776563997056e-05,
      "loss": 0.8873,
      "step": 70
    },
    {
      "epoch": 0.19667590027700832,
      "grad_norm": 5.11320686340332,
      "learning_rate": 5.540679782616892e-05,
      "loss": 0.8752,
      "step": 71
    },
    {
      "epoch": 0.1994459833795014,
      "grad_norm": 4.8276214599609375,
      "learning_rate": 5.52638176907166e-05,
      "loss": 0.8441,
      "step": 72
    },
    {
      "epoch": 0.20221606648199447,
      "grad_norm": 5.574111461639404,
      "learning_rate": 5.5118836558221475e-05,
      "loss": 0.907,
      "step": 73
    },
    {
      "epoch": 0.20498614958448755,
      "grad_norm": 5.575868606567383,
      "learning_rate": 5.497186591177849e-05,
      "loss": 0.8691,
      "step": 74
    },
    {
      "epoch": 0.2077562326869806,
      "grad_norm": 5.388404846191406,
      "learning_rate": 5.4822917392060184e-05,
      "loss": 0.8935,
      "step": 75
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 6.2548747062683105,
      "learning_rate": 5.46720027963947e-05,
      "loss": 0.8814,
      "step": 76
    },
    {
      "epoch": 0.21329639889196675,
      "grad_norm": 5.358329772949219,
      "learning_rate": 5.4519134077831395e-05,
      "loss": 0.9041,
      "step": 77
    },
    {
      "epoch": 0.21606648199445982,
      "grad_norm": 6.0121564865112305,
      "learning_rate": 5.4364323344194095e-05,
      "loss": 0.8568,
      "step": 78
    },
    {
      "epoch": 0.2188365650969529,
      "grad_norm": 6.2294020652771,
      "learning_rate": 5.420758285712211e-05,
      "loss": 0.9205,
      "step": 79
    },
    {
      "epoch": 0.22160664819944598,
      "grad_norm": 5.889683723449707,
      "learning_rate": 5.404892503109906e-05,
      "loss": 0.8264,
      "step": 80
    },
    {
      "epoch": 0.22437673130193905,
      "grad_norm": 5.401793956756592,
      "learning_rate": 5.388836243246963e-05,
      "loss": 0.8574,
      "step": 81
    },
    {
      "epoch": 0.22714681440443213,
      "grad_norm": 6.194601058959961,
      "learning_rate": 5.372590777844421e-05,
      "loss": 0.8742,
      "step": 82
    },
    {
      "epoch": 0.2299168975069252,
      "grad_norm": 5.504939079284668,
      "learning_rate": 5.356157393609167e-05,
      "loss": 0.8261,
      "step": 83
    },
    {
      "epoch": 0.23268698060941828,
      "grad_norm": 5.818756103515625,
      "learning_rate": 5.339537392132025e-05,
      "loss": 0.8364,
      "step": 84
    },
    {
      "epoch": 0.23545706371191136,
      "grad_norm": 5.8196024894714355,
      "learning_rate": 5.322732089784661e-05,
      "loss": 0.8577,
      "step": 85
    },
    {
      "epoch": 0.23822714681440443,
      "grad_norm": 6.102773189544678,
      "learning_rate": 5.305742817615325e-05,
      "loss": 0.8708,
      "step": 86
    },
    {
      "epoch": 0.2409972299168975,
      "grad_norm": 6.225861549377441,
      "learning_rate": 5.288570921243423e-05,
      "loss": 0.8654,
      "step": 87
    },
    {
      "epoch": 0.24376731301939059,
      "grad_norm": 6.0107340812683105,
      "learning_rate": 5.2712177607529405e-05,
      "loss": 0.8401,
      "step": 88
    },
    {
      "epoch": 0.24653739612188366,
      "grad_norm": 6.3431715965271,
      "learning_rate": 5.2536847105847185e-05,
      "loss": 0.855,
      "step": 89
    },
    {
      "epoch": 0.24930747922437674,
      "grad_norm": 5.196923732757568,
      "learning_rate": 5.235973159427591e-05,
      "loss": 0.756,
      "step": 90
    },
    {
      "epoch": 0.2520775623268698,
      "grad_norm": 6.157230377197266,
      "learning_rate": 5.218084510108397e-05,
      "loss": 0.856,
      "step": 91
    },
    {
      "epoch": 0.2548476454293629,
      "grad_norm": 5.813492774963379,
      "learning_rate": 5.200020179480868e-05,
      "loss": 0.7829,
      "step": 92
    },
    {
      "epoch": 0.25761772853185594,
      "grad_norm": 6.468319416046143,
      "learning_rate": 5.181781598313409e-05,
      "loss": 0.8327,
      "step": 93
    },
    {
      "epoch": 0.26038781163434904,
      "grad_norm": 6.1590423583984375,
      "learning_rate": 5.16337021117578e-05,
      "loss": 0.8035,
      "step": 94
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 6.378724575042725,
      "learning_rate": 5.14478747632467e-05,
      "loss": 0.8134,
      "step": 95
    },
    {
      "epoch": 0.2659279778393352,
      "grad_norm": 5.47311544418335,
      "learning_rate": 5.126034865588208e-05,
      "loss": 0.7149,
      "step": 96
    },
    {
      "epoch": 0.26869806094182824,
      "grad_norm": 5.790994644165039,
      "learning_rate": 5.107113864249381e-05,
      "loss": 0.7714,
      "step": 97
    },
    {
      "epoch": 0.27146814404432135,
      "grad_norm": 5.880769729614258,
      "learning_rate": 5.088025970928399e-05,
      "loss": 0.7519,
      "step": 98
    },
    {
      "epoch": 0.2742382271468144,
      "grad_norm": 5.894231796264648,
      "learning_rate": 5.068772697463992e-05,
      "loss": 0.8124,
      "step": 99
    },
    {
      "epoch": 0.2770083102493075,
      "grad_norm": 6.011329174041748,
      "learning_rate": 5.0493555687936704e-05,
      "loss": 0.7536,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 361,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.2857642925601587e+17,
  "train_batch_size": 9,
  "trial_name": null,
  "trial_params": null
}