{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 99.91201764057332,
  "eval_steps": 500,
  "global_step": 113300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.9993384785005512,
      "grad_norm": 6.264042854309082,
      "learning_rate": 9.90035304501324e-06,
      "loss": 2.1264,
      "step": 1133
    },
    {
      "epoch": 1.9984564498346198,
      "grad_norm": 8.962105751037598,
      "learning_rate": 9.80035304501324e-06,
      "loss": 2.0771,
      "step": 2266
    },
    {
      "epoch": 2.997574421168688,
      "grad_norm": 13.9079008102417,
      "learning_rate": 9.70044130626655e-06,
      "loss": 2.0072,
      "step": 3399
    },
    {
      "epoch": 3.9966923925027564,
      "grad_norm": 9.212200164794922,
      "learning_rate": 9.60044130626655e-06,
      "loss": 1.9554,
      "step": 4532
    },
    {
      "epoch": 4.995810363836824,
      "grad_norm": 9.967486381530762,
      "learning_rate": 9.50052956751986e-06,
      "loss": 1.9156,
      "step": 5665
    },
    {
      "epoch": 5.9949283351708935,
      "grad_norm": 9.588685989379883,
      "learning_rate": 9.40061782877317e-06,
      "loss": 1.9031,
      "step": 6798
    },
    {
      "epoch": 6.994046306504961,
      "grad_norm": 5.2934889793396,
      "learning_rate": 9.30061782877317e-06,
      "loss": 1.8739,
      "step": 7931
    },
    {
      "epoch": 7.99316427783903,
      "grad_norm": 12.791667938232422,
      "learning_rate": 9.20070609002648e-06,
      "loss": 1.8476,
      "step": 9064
    },
    {
      "epoch": 8.992282249173098,
      "grad_norm": 5.592958927154541,
      "learning_rate": 9.100706090026479e-06,
      "loss": 1.8378,
      "step": 10197
    },
    {
      "epoch": 9.991400220507167,
      "grad_norm": 6.236687660217285,
      "learning_rate": 9.000706090026478e-06,
      "loss": 1.8141,
      "step": 11330
    },
    {
      "epoch": 10.990518191841234,
      "grad_norm": 14.459975242614746,
      "learning_rate": 8.90079435127979e-06,
      "loss": 1.8008,
      "step": 12463
    },
    {
      "epoch": 11.989636163175303,
      "grad_norm": 7.29625940322876,
      "learning_rate": 8.800794351279789e-06,
      "loss": 1.7965,
      "step": 13596
    },
    {
      "epoch": 12.98875413450937,
      "grad_norm": 7.927811622619629,
      "learning_rate": 8.7008826125331e-06,
      "loss": 1.7768,
      "step": 14729
    },
    {
      "epoch": 13.98787210584344,
      "grad_norm": 7.546788692474365,
      "learning_rate": 8.600882612533099e-06,
      "loss": 1.7735,
      "step": 15862
    },
    {
      "epoch": 14.986990077177508,
      "grad_norm": 2.396638870239258,
      "learning_rate": 8.500970873786408e-06,
      "loss": 1.7688,
      "step": 16995
    },
    {
      "epoch": 15.986108048511577,
      "grad_norm": 4.344698905944824,
      "learning_rate": 8.400970873786408e-06,
      "loss": 1.7521,
      "step": 18128
    },
    {
      "epoch": 16.985226019845644,
      "grad_norm": 3.3172178268432617,
      "learning_rate": 8.300970873786409e-06,
      "loss": 1.7524,
      "step": 19261
    },
    {
      "epoch": 17.984343991179713,
      "grad_norm": 4.233256816864014,
      "learning_rate": 8.200970873786408e-06,
      "loss": 1.7394,
      "step": 20394
    },
    {
      "epoch": 18.983461962513783,
      "grad_norm": 3.924440860748291,
      "learning_rate": 8.101147396293027e-06,
      "loss": 1.7355,
      "step": 21527
    },
    {
      "epoch": 19.982579933847852,
      "grad_norm": 8.452503204345703,
      "learning_rate": 8.001147396293027e-06,
      "loss": 1.7245,
      "step": 22660
    },
    {
      "epoch": 20.981697905181917,
      "grad_norm": 10.714750289916992,
      "learning_rate": 7.901147396293028e-06,
      "loss": 1.7205,
      "step": 23793
    },
    {
      "epoch": 21.980815876515987,
      "grad_norm": 12.158623695373535,
      "learning_rate": 7.801147396293029e-06,
      "loss": 1.7207,
      "step": 24926
    },
    {
      "epoch": 22.979933847850056,
      "grad_norm": 2.1460916996002197,
      "learning_rate": 7.701323918799648e-06,
      "loss": 1.716,
      "step": 26059
    },
    {
      "epoch": 23.979051819184125,
      "grad_norm": 2.6795060634613037,
      "learning_rate": 7.601323918799648e-06,
      "loss": 1.71,
      "step": 27192
    },
    {
      "epoch": 24.97816979051819,
      "grad_norm": 4.405874729156494,
      "learning_rate": 7.501412180052957e-06,
      "loss": 1.7143,
      "step": 28325
    },
    {
      "epoch": 25.97728776185226,
      "grad_norm": 2.3188281059265137,
      "learning_rate": 7.401412180052957e-06,
      "loss": 1.6879,
      "step": 29458
    },
    {
      "epoch": 26.97640573318633,
      "grad_norm": 4.247509002685547,
      "learning_rate": 7.301412180052957e-06,
      "loss": 1.7008,
      "step": 30591
    },
    {
      "epoch": 27.9755237045204,
      "grad_norm": 5.176025390625,
      "learning_rate": 7.2015004413062674e-06,
      "loss": 1.6904,
      "step": 31724
    },
    {
      "epoch": 28.974641675854464,
      "grad_norm": 3.4096691608428955,
      "learning_rate": 7.1015004413062675e-06,
      "loss": 1.6884,
      "step": 32857
    },
    {
      "epoch": 29.973759647188533,
      "grad_norm": 5.346144676208496,
      "learning_rate": 7.001588702559578e-06,
      "loss": 1.6874,
      "step": 33990
    },
    {
      "epoch": 30.972877618522602,
      "grad_norm": 4.809396743774414,
      "learning_rate": 6.901588702559577e-06,
      "loss": 1.6834,
      "step": 35123
    },
    {
      "epoch": 31.97199558985667,
      "grad_norm": 4.950044631958008,
      "learning_rate": 6.8016769638128865e-06,
      "loss": 1.6704,
      "step": 36256
    },
    {
      "epoch": 32.97111356119074,
      "grad_norm": 2.259593963623047,
      "learning_rate": 6.701676963812887e-06,
      "loss": 1.676,
      "step": 37389
    },
    {
      "epoch": 33.97023153252481,
      "grad_norm": 1.9857131242752075,
      "learning_rate": 6.601765225066197e-06,
      "loss": 1.6721,
      "step": 38522
    },
    {
      "epoch": 34.96934950385887,
      "grad_norm": 2.6523988246917725,
      "learning_rate": 6.501765225066197e-06,
      "loss": 1.6519,
      "step": 39655
    },
    {
      "epoch": 35.968467475192945,
      "grad_norm": 2.5178589820861816,
      "learning_rate": 6.401853486319506e-06,
      "loss": 1.6763,
      "step": 40788
    },
    {
      "epoch": 36.96758544652701,
      "grad_norm": 3.966003894805908,
      "learning_rate": 6.301941747572816e-06,
      "loss": 1.6729,
      "step": 41921
    },
    {
      "epoch": 37.96670341786108,
      "grad_norm": 2.874756336212158,
      "learning_rate": 6.201941747572816e-06,
      "loss": 1.6761,
      "step": 43054
    },
    {
      "epoch": 38.96582138919515,
      "grad_norm": 5.206564426422119,
      "learning_rate": 6.102030008826125e-06,
      "loss": 1.6731,
      "step": 44187
    },
    {
      "epoch": 39.964939360529215,
      "grad_norm": 3.3622446060180664,
      "learning_rate": 6.002030008826126e-06,
      "loss": 1.6649,
      "step": 45320
    },
    {
      "epoch": 40.96405733186329,
      "grad_norm": 2.243980884552002,
      "learning_rate": 5.902118270079436e-06,
      "loss": 1.6617,
      "step": 46453
    },
    {
      "epoch": 41.96317530319735,
      "grad_norm": 2.656219720840454,
      "learning_rate": 5.802118270079435e-06,
      "loss": 1.6539,
      "step": 47586
    },
    {
      "epoch": 42.96229327453142,
      "grad_norm": 2.1969571113586426,
      "learning_rate": 5.702206531332745e-06,
      "loss": 1.6507,
      "step": 48719
    },
    {
      "epoch": 43.96141124586549,
      "grad_norm": 2.570960760116577,
      "learning_rate": 5.602294792586055e-06,
      "loss": 1.6517,
      "step": 49852
    },
    {
      "epoch": 44.96052921719956,
      "grad_norm": 2.2769925594329834,
      "learning_rate": 5.502294792586055e-06,
      "loss": 1.6622,
      "step": 50985
    },
    {
      "epoch": 45.95964718853363,
      "grad_norm": 1.3121511936187744,
      "learning_rate": 5.402383053839365e-06,
      "loss": 1.6583,
      "step": 52118
    },
    {
      "epoch": 46.958765159867696,
      "grad_norm": 2.186378002166748,
      "learning_rate": 5.302383053839365e-06,
      "loss": 1.6579,
      "step": 53251
    },
    {
      "epoch": 47.95788313120176,
      "grad_norm": 1.257639765739441,
      "learning_rate": 5.202471315092674e-06,
      "loss": 1.653,
      "step": 54384
    },
    {
      "epoch": 48.957001102535834,
      "grad_norm": 2.570171594619751,
      "learning_rate": 5.102471315092676e-06,
      "loss": 1.6503,
      "step": 55517
    },
    {
      "epoch": 49.9561190738699,
      "grad_norm": 1.8046927452087402,
      "learning_rate": 5.002559576345984e-06,
      "loss": 1.6569,
      "step": 56650
    },
    {
      "epoch": 50.95523704520397,
      "grad_norm": 2.055699348449707,
      "learning_rate": 4.9026478375992945e-06,
      "loss": 1.649,
      "step": 57783
    },
    {
      "epoch": 51.95435501653804,
      "grad_norm": 1.5541220903396606,
      "learning_rate": 4.802647837599294e-06,
      "loss": 1.6463,
      "step": 58916
    },
    {
      "epoch": 52.953472987872104,
      "grad_norm": 1.3364524841308594,
      "learning_rate": 4.702736098852604e-06,
      "loss": 1.6406,
      "step": 60049
    },
    {
      "epoch": 53.95259095920618,
      "grad_norm": 1.8428481817245483,
      "learning_rate": 4.602736098852604e-06,
      "loss": 1.6443,
      "step": 61182
    },
    {
      "epoch": 54.95170893054024,
      "grad_norm": 1.3728575706481934,
      "learning_rate": 4.5028243601059144e-06,
      "loss": 1.6427,
      "step": 62315
    },
    {
      "epoch": 55.95082690187431,
      "grad_norm": 2.039762496948242,
      "learning_rate": 4.402824360105914e-06,
      "loss": 1.6523,
      "step": 63448
    },
    {
      "epoch": 56.94994487320838,
      "grad_norm": 0.4440517723560333,
      "learning_rate": 4.302912621359224e-06,
      "loss": 1.6346,
      "step": 64581
    },
    {
      "epoch": 57.949062844542446,
      "grad_norm": 2.8719191551208496,
      "learning_rate": 4.202912621359223e-06,
      "loss": 1.6481,
      "step": 65714
    },
    {
      "epoch": 58.94818081587652,
      "grad_norm": 2.116090774536133,
      "learning_rate": 4.1030008826125335e-06,
      "loss": 1.6403,
      "step": 66847
    },
    {
      "epoch": 59.947298787210585,
      "grad_norm": 2.5801730155944824,
      "learning_rate": 4.003089143865843e-06,
      "loss": 1.6445,
      "step": 67980
    },
    {
      "epoch": 60.94641675854465,
      "grad_norm": 1.5309211015701294,
      "learning_rate": 3.903089143865843e-06,
      "loss": 1.6495,
      "step": 69113
    },
    {
      "epoch": 61.94553472987872,
      "grad_norm": 2.1777145862579346,
      "learning_rate": 3.803177405119153e-06,
      "loss": 1.6477,
      "step": 70246
    },
    {
      "epoch": 62.94465270121279,
      "grad_norm": 1.003503441810608,
      "learning_rate": 3.7031774051191535e-06,
      "loss": 1.6387,
      "step": 71379
    },
    {
      "epoch": 63.943770672546854,
      "grad_norm": 2.015745162963867,
      "learning_rate": 3.603265666372463e-06,
      "loss": 1.6307,
      "step": 72512
    },
    {
      "epoch": 64.94288864388092,
      "grad_norm": 2.482320547103882,
      "learning_rate": 3.503265666372463e-06,
      "loss": 1.6402,
      "step": 73645
    },
    {
      "epoch": 65.94200661521499,
      "grad_norm": 2.404247999191284,
      "learning_rate": 3.403353927625773e-06,
      "loss": 1.6409,
      "step": 74778
    },
    {
      "epoch": 66.94112458654907,
      "grad_norm": 1.197210669517517,
      "learning_rate": 3.3034421888790823e-06,
      "loss": 1.6518,
      "step": 75911
    },
    {
      "epoch": 67.94024255788312,
      "grad_norm": 0.6208453178405762,
      "learning_rate": 3.2034421888790824e-06,
      "loss": 1.6461,
      "step": 77044
    },
    {
      "epoch": 68.9393605292172,
      "grad_norm": 1.967084527015686,
      "learning_rate": 3.1035304501323922e-06,
      "loss": 1.6393,
      "step": 78177
    },
    {
      "epoch": 69.93847850055127,
      "grad_norm": 1.0003052949905396,
      "learning_rate": 3.003530450132392e-06,
      "loss": 1.6259,
      "step": 79310
    },
    {
      "epoch": 70.93759647188534,
      "grad_norm": 0.6473856568336487,
      "learning_rate": 2.9036187113857018e-06,
      "loss": 1.6302,
      "step": 80443
    },
    {
      "epoch": 71.9367144432194,
      "grad_norm": 0.949741780757904,
      "learning_rate": 2.8036187113857015e-06,
      "loss": 1.6355,
      "step": 81576
    },
    {
      "epoch": 72.93583241455347,
      "grad_norm": 1.2901132106781006,
      "learning_rate": 2.7037069726390117e-06,
      "loss": 1.6335,
      "step": 82709
    },
    {
      "epoch": 73.93495038588755,
      "grad_norm": 1.8243396282196045,
      "learning_rate": 2.6037952338923216e-06,
      "loss": 1.6329,
      "step": 83842
    },
    {
      "epoch": 74.9340683572216,
      "grad_norm": 0.6137579679489136,
      "learning_rate": 2.5037952338923217e-06,
      "loss": 1.6421,
      "step": 84975
    },
    {
      "epoch": 75.93318632855568,
      "grad_norm": 1.270875096321106,
      "learning_rate": 2.403883495145631e-06,
      "loss": 1.628,
      "step": 86108
    },
    {
      "epoch": 76.93230429988975,
      "grad_norm": 0.9131399393081665,
      "learning_rate": 2.3038834951456313e-06,
      "loss": 1.6235,
      "step": 87241
    },
    {
      "epoch": 77.93142227122381,
      "grad_norm": 1.042668342590332,
      "learning_rate": 2.203971756398941e-06,
      "loss": 1.6323,
      "step": 88374
    },
    {
      "epoch": 78.93054024255788,
      "grad_norm": 2.174466609954834,
      "learning_rate": 2.103971756398941e-06,
      "loss": 1.6263,
      "step": 89507
    },
    {
      "epoch": 79.92965821389195,
      "grad_norm": 0.5852002501487732,
      "learning_rate": 2.004060017652251e-06,
      "loss": 1.6354,
      "step": 90640
    },
    {
      "epoch": 80.92877618522601,
      "grad_norm": 0.41708850860595703,
      "learning_rate": 1.9040600176522508e-06,
      "loss": 1.6177,
      "step": 91773
    },
    {
      "epoch": 81.92789415656009,
      "grad_norm": 1.2882851362228394,
      "learning_rate": 1.8041482789055606e-06,
      "loss": 1.6317,
      "step": 92906
    },
    {
      "epoch": 82.92701212789416,
      "grad_norm": 1.3567571640014648,
      "learning_rate": 1.7042365401588703e-06,
      "loss": 1.6336,
      "step": 94039
    },
    {
      "epoch": 83.92613009922823,
      "grad_norm": 1.7891902923583984,
      "learning_rate": 1.6042365401588702e-06,
      "loss": 1.6285,
      "step": 95172
    },
    {
      "epoch": 84.92524807056229,
      "grad_norm": 1.0167814493179321,
      "learning_rate": 1.5043248014121802e-06,
      "loss": 1.6233,
      "step": 96305
    },
    {
      "epoch": 85.92436604189636,
      "grad_norm": 0.6127368211746216,
      "learning_rate": 1.4043248014121801e-06,
      "loss": 1.6194,
      "step": 97438
    },
    {
      "epoch": 86.92348401323044,
      "grad_norm": 0.4271755516529083,
      "learning_rate": 1.30441306266549e-06,
      "loss": 1.6292,
      "step": 98571
    },
    {
      "epoch": 87.9226019845645,
      "grad_norm": 0.6342440843582153,
      "learning_rate": 1.20441306266549e-06,
      "loss": 1.6266,
      "step": 99704
    },
    {
      "epoch": 88.92171995589857,
      "grad_norm": 0.4804386794567108,
      "learning_rate": 1.1045013239187997e-06,
      "loss": 1.624,
      "step": 100837
    },
    {
      "epoch": 89.92083792723264,
      "grad_norm": 0.6205073595046997,
      "learning_rate": 1.0045895851721096e-06,
      "loss": 1.6225,
      "step": 101970
    },
    {
      "epoch": 90.9199558985667,
      "grad_norm": 0.5994776487350464,
      "learning_rate": 9.045895851721096e-07,
      "loss": 1.6296,
      "step": 103103
    },
    {
      "epoch": 91.91907386990077,
      "grad_norm": 1.454080581665039,
      "learning_rate": 8.046778464254193e-07,
      "loss": 1.6254,
      "step": 104236
    },
    {
      "epoch": 92.91819184123484,
      "grad_norm": 0.8811701536178589,
      "learning_rate": 7.046778464254193e-07,
      "loss": 1.6295,
      "step": 105369
    },
    {
      "epoch": 93.9173098125689,
      "grad_norm": 0.6332120895385742,
      "learning_rate": 6.047661076787291e-07,
      "loss": 1.6269,
      "step": 106502
    },
    {
      "epoch": 94.91642778390298,
      "grad_norm": 1.2232398986816406,
      "learning_rate": 5.047661076787291e-07,
      "loss": 1.6254,
      "step": 107635
    },
    {
      "epoch": 95.91554575523705,
      "grad_norm": 0.9554293751716614,
      "learning_rate": 4.048543689320389e-07,
      "loss": 1.6307,
      "step": 108768
    },
    {
      "epoch": 96.91466372657112,
      "grad_norm": 0.42345738410949707,
      "learning_rate": 3.0485436893203884e-07,
      "loss": 1.6314,
      "step": 109901
    },
    {
      "epoch": 97.91378169790518,
      "grad_norm": 0.4951033890247345,
      "learning_rate": 2.0494263018534864e-07,
      "loss": 1.6341,
      "step": 111034
    },
    {
      "epoch": 98.91289966923925,
      "grad_norm": 0.40377479791641235,
      "learning_rate": 1.0503089143865844e-07,
      "loss": 1.6174,
      "step": 112167
    },
    {
      "epoch": 99.91201764057332,
      "grad_norm": 0.5117936730384827,
      "learning_rate": 5.03089143865843e-09,
      "loss": 1.6313,
      "step": 113300
    },
    {
      "epoch": 99.91201764057332,
      "step": 113300,
      "total_flos": 5.364383961180322e+18,
      "train_loss": 1.6900030501417007,
      "train_runtime": 96835.149,
      "train_samples_per_second": 140.49,
      "train_steps_per_second": 1.17
    }
  ],
  "logging_steps": 1133,
  "max_steps": 113300,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 100,
  "save_steps": 6800,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.364383961180322e+18,
  "train_batch_size": 30,
  "trial_name": null,
  "trial_params": null
}