{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9987760097919217,
  "eval_steps": 500,
  "global_step": 204,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004895960832313341,
      "grad_norm": 0.03658522292971611,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.4365,
      "step": 1
    },
    {
      "epoch": 0.009791921664626682,
      "grad_norm": 0.035473328083753586,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.6677,
      "step": 2
    },
    {
      "epoch": 0.014687882496940025,
      "grad_norm": 0.030256139114499092,
      "learning_rate": 2.7272727272727273e-05,
      "loss": 0.4084,
      "step": 3
    },
    {
      "epoch": 0.019583843329253364,
      "grad_norm": 0.04286854714155197,
      "learning_rate": 3.6363636363636364e-05,
      "loss": 0.3422,
      "step": 4
    },
    {
      "epoch": 0.02447980416156671,
      "grad_norm": 0.06271344423294067,
      "learning_rate": 4.545454545454546e-05,
      "loss": 0.5332,
      "step": 5
    },
    {
      "epoch": 0.02937576499388005,
      "grad_norm": 0.09929833561182022,
      "learning_rate": 5.4545454545454546e-05,
      "loss": 0.7114,
      "step": 6
    },
    {
      "epoch": 0.03427172582619339,
      "grad_norm": 0.08789193630218506,
      "learning_rate": 6.363636363636364e-05,
      "loss": 0.5457,
      "step": 7
    },
    {
      "epoch": 0.03916768665850673,
      "grad_norm": 0.0518026277422905,
      "learning_rate": 7.272727272727273e-05,
      "loss": 0.3846,
      "step": 8
    },
    {
      "epoch": 0.044063647490820076,
      "grad_norm": 0.042549822479486465,
      "learning_rate": 8.181818181818183e-05,
      "loss": 0.5076,
      "step": 9
    },
    {
      "epoch": 0.04895960832313342,
      "grad_norm": 0.051444098353385925,
      "learning_rate": 9.090909090909092e-05,
      "loss": 0.4675,
      "step": 10
    },
    {
      "epoch": 0.05385556915544676,
      "grad_norm": 0.08935613930225372,
      "learning_rate": 0.0001,
      "loss": 0.3412,
      "step": 11
    },
    {
      "epoch": 0.0587515299877601,
      "grad_norm": 0.10168597102165222,
      "learning_rate": 9.99933760728612e-05,
      "loss": 0.4016,
      "step": 12
    },
    {
      "epoch": 0.06364749082007344,
      "grad_norm": 0.1793641448020935,
      "learning_rate": 9.997350604650123e-05,
      "loss": 0.3738,
      "step": 13
    },
    {
      "epoch": 0.06854345165238677,
      "grad_norm": 0.24378074705600739,
      "learning_rate": 9.994039518562432e-05,
      "loss": 0.5781,
      "step": 14
    },
    {
      "epoch": 0.07343941248470012,
      "grad_norm": 0.08958296477794647,
      "learning_rate": 9.989405226318772e-05,
      "loss": 0.4275,
      "step": 15
    },
    {
      "epoch": 0.07833537331701346,
      "grad_norm": 0.16461962461471558,
      "learning_rate": 9.983448955807708e-05,
      "loss": 0.4416,
      "step": 16
    },
    {
      "epoch": 0.0832313341493268,
      "grad_norm": 0.2743546664714813,
      "learning_rate": 9.976172285185314e-05,
      "loss": 0.5598,
      "step": 17
    },
    {
      "epoch": 0.08812729498164015,
      "grad_norm": 0.05548033118247986,
      "learning_rate": 9.967577142457032e-05,
      "loss": 0.3509,
      "step": 18
    },
    {
      "epoch": 0.09302325581395349,
      "grad_norm": 0.10299059748649597,
      "learning_rate": 9.957665804966829e-05,
      "loss": 0.7287,
      "step": 19
    },
    {
      "epoch": 0.09791921664626684,
      "grad_norm": 0.1017536073923111,
      "learning_rate": 9.946440898793801e-05,
      "loss": 0.638,
      "step": 20
    },
    {
      "epoch": 0.10281517747858017,
      "grad_norm": 0.09810858964920044,
      "learning_rate": 9.933905398056372e-05,
      "loss": 0.4959,
      "step": 21
    },
    {
      "epoch": 0.10771113831089352,
      "grad_norm": 0.07071840763092041,
      "learning_rate": 9.920062624124282e-05,
      "loss": 0.5835,
      "step": 22
    },
    {
      "epoch": 0.11260709914320685,
      "grad_norm": 0.23103618621826172,
      "learning_rate": 9.904916244738571e-05,
      "loss": 0.7785,
      "step": 23
    },
    {
      "epoch": 0.1175030599755202,
      "grad_norm": 0.11760212481021881,
      "learning_rate": 9.888470273039775e-05,
      "loss": 0.3422,
      "step": 24
    },
    {
      "epoch": 0.12239902080783353,
      "grad_norm": 0.07370053976774216,
      "learning_rate": 9.870729066504629e-05,
      "loss": 0.3107,
      "step": 25
    },
    {
      "epoch": 0.12729498164014688,
      "grad_norm": 0.07285131514072418,
      "learning_rate": 9.851697325791505e-05,
      "loss": 0.4473,
      "step": 26
    },
    {
      "epoch": 0.13219094247246022,
      "grad_norm": 0.05355285108089447,
      "learning_rate": 9.831380093494957e-05,
      "loss": 0.3368,
      "step": 27
    },
    {
      "epoch": 0.13708690330477355,
      "grad_norm": 0.07507691532373428,
      "learning_rate": 9.809782752809644e-05,
      "loss": 0.3709,
      "step": 28
    },
    {
      "epoch": 0.1419828641370869,
      "grad_norm": 0.055260878056287766,
      "learning_rate": 9.786911026104007e-05,
      "loss": 0.4986,
      "step": 29
    },
    {
      "epoch": 0.14687882496940025,
      "grad_norm": 0.16646364331245422,
      "learning_rate": 9.762770973404094e-05,
      "loss": 0.6252,
      "step": 30
    },
    {
      "epoch": 0.15177478580171358,
      "grad_norm": 0.22445577383041382,
      "learning_rate": 9.737368990787916e-05,
      "loss": 0.4772,
      "step": 31
    },
    {
      "epoch": 0.15667074663402691,
      "grad_norm": 0.22849321365356445,
      "learning_rate": 9.710711808690754e-05,
      "loss": 0.381,
      "step": 32
    },
    {
      "epoch": 0.16156670746634028,
      "grad_norm": 0.05855480954051018,
      "learning_rate": 9.682806490121885e-05,
      "loss": 0.3429,
      "step": 33
    },
    {
      "epoch": 0.1664626682986536,
      "grad_norm": 0.06263675540685654,
      "learning_rate": 9.653660428793188e-05,
      "loss": 0.3927,
      "step": 34
    },
    {
      "epoch": 0.17135862913096694,
      "grad_norm": 0.277313232421875,
      "learning_rate": 9.623281347160127e-05,
      "loss": 0.9417,
      "step": 35
    },
    {
      "epoch": 0.1762545899632803,
      "grad_norm": 0.07143048942089081,
      "learning_rate": 9.591677294375636e-05,
      "loss": 0.4428,
      "step": 36
    },
    {
      "epoch": 0.18115055079559364,
      "grad_norm": 0.07706379145383835,
      "learning_rate": 9.558856644157432e-05,
      "loss": 0.3614,
      "step": 37
    },
    {
      "epoch": 0.18604651162790697,
      "grad_norm": 0.25005754828453064,
      "learning_rate": 9.52482809256934e-05,
      "loss": 0.6474,
      "step": 38
    },
    {
      "epoch": 0.1909424724602203,
      "grad_norm": 0.06337836384773254,
      "learning_rate": 9.489600655717217e-05,
      "loss": 0.4504,
      "step": 39
    },
    {
      "epoch": 0.19583843329253367,
      "grad_norm": 0.21187523007392883,
      "learning_rate": 9.453183667360062e-05,
      "loss": 0.6336,
      "step": 40
    },
    {
      "epoch": 0.200734394124847,
      "grad_norm": 0.1760697066783905,
      "learning_rate": 9.415586776436973e-05,
      "loss": 0.5665,
      "step": 41
    },
    {
      "epoch": 0.20563035495716034,
      "grad_norm": 0.10999207943677902,
      "learning_rate": 9.376819944510598e-05,
      "loss": 0.3929,
      "step": 42
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 0.09944123774766922,
      "learning_rate": 9.336893443127738e-05,
      "loss": 0.4195,
      "step": 43
    },
    {
      "epoch": 0.21542227662178703,
      "grad_norm": 0.1161671131849289,
      "learning_rate": 9.295817851097837e-05,
      "loss": 0.4643,
      "step": 44
    },
    {
      "epoch": 0.22031823745410037,
      "grad_norm": 0.11634702235460281,
      "learning_rate": 9.253604051690046e-05,
      "loss": 0.5375,
      "step": 45
    },
    {
      "epoch": 0.2252141982864137,
      "grad_norm": 0.07312487810850143,
      "learning_rate": 9.210263229749626e-05,
      "loss": 0.3291,
      "step": 46
    },
    {
      "epoch": 0.23011015911872704,
      "grad_norm": 0.08545304834842682,
      "learning_rate": 9.165806868734444e-05,
      "loss": 0.5543,
      "step": 47
    },
    {
      "epoch": 0.2350061199510404,
      "grad_norm": 0.15719719231128693,
      "learning_rate": 9.120246747672347e-05,
      "loss": 0.7045,
      "step": 48
    },
    {
      "epoch": 0.23990208078335373,
      "grad_norm": 0.11078042536973953,
      "learning_rate": 9.073594938040231e-05,
      "loss": 0.572,
      "step": 49
    },
    {
      "epoch": 0.24479804161566707,
      "grad_norm": 0.1860225647687912,
      "learning_rate": 9.025863800565613e-05,
      "loss": 0.7102,
      "step": 50
    },
    {
      "epoch": 0.24969400244798043,
      "grad_norm": 0.11442095041275024,
      "learning_rate": 8.977065981951566e-05,
      "loss": 0.5135,
      "step": 51
    },
    {
      "epoch": 0.25458996328029376,
      "grad_norm": 0.17788943648338318,
      "learning_rate": 8.927214411525895e-05,
      "loss": 0.4911,
      "step": 52
    },
    {
      "epoch": 0.2594859241126071,
      "grad_norm": 0.24241389334201813,
      "learning_rate": 8.876322297815405e-05,
      "loss": 0.6657,
      "step": 53
    },
    {
      "epoch": 0.26438188494492043,
      "grad_norm": 0.07881529629230499,
      "learning_rate": 8.824403125046225e-05,
      "loss": 0.4495,
      "step": 54
    },
    {
      "epoch": 0.2692778457772338,
      "grad_norm": 0.12150023877620697,
      "learning_rate": 8.771470649571056e-05,
      "loss": 0.3651,
      "step": 55
    },
    {
      "epoch": 0.2741738066095471,
      "grad_norm": 0.07412393391132355,
      "learning_rate": 8.717538896224332e-05,
      "loss": 0.3277,
      "step": 56
    },
    {
      "epoch": 0.27906976744186046,
      "grad_norm": 0.06195759400725365,
      "learning_rate": 8.662622154606237e-05,
      "loss": 0.361,
      "step": 57
    },
    {
      "epoch": 0.2839657282741738,
      "grad_norm": 0.06260855495929718,
      "learning_rate": 8.606734975296578e-05,
      "loss": 0.4541,
      "step": 58
    },
    {
      "epoch": 0.28886168910648713,
      "grad_norm": 0.22213032841682434,
      "learning_rate": 8.549892165999505e-05,
      "loss": 0.4133,
      "step": 59
    },
    {
      "epoch": 0.2937576499388005,
      "grad_norm": 0.08584744483232498,
      "learning_rate": 8.492108787620105e-05,
      "loss": 0.3445,
      "step": 60
    },
    {
      "epoch": 0.29865361077111385,
      "grad_norm": 0.07675541192293167,
      "learning_rate": 8.433400150273906e-05,
      "loss": 0.4518,
      "step": 61
    },
    {
      "epoch": 0.30354957160342716,
      "grad_norm": 0.07371577620506287,
      "learning_rate": 8.373781809230355e-05,
      "loss": 0.308,
      "step": 62
    },
    {
      "epoch": 0.3084455324357405,
      "grad_norm": 0.10943123698234558,
      "learning_rate": 8.313269560791342e-05,
      "loss": 0.7011,
      "step": 63
    },
    {
      "epoch": 0.31334149326805383,
      "grad_norm": 0.07427150011062622,
      "learning_rate": 8.251879438105854e-05,
      "loss": 0.3193,
      "step": 64
    },
    {
      "epoch": 0.3182374541003672,
      "grad_norm": 0.06212422996759415,
      "learning_rate": 8.189627706921877e-05,
      "loss": 0.3383,
      "step": 65
    },
    {
      "epoch": 0.32313341493268055,
      "grad_norm": 0.1618916094303131,
      "learning_rate": 8.126530861276677e-05,
      "loss": 0.5653,
      "step": 66
    },
    {
      "epoch": 0.32802937576499386,
      "grad_norm": 0.06333769857883453,
      "learning_rate": 8.062605619126584e-05,
      "loss": 0.3395,
      "step": 67
    },
    {
      "epoch": 0.3329253365973072,
      "grad_norm": 0.16169676184654236,
      "learning_rate": 7.997868917917453e-05,
      "loss": 0.7753,
      "step": 68
    },
    {
      "epoch": 0.3378212974296206,
      "grad_norm": 0.06635843217372894,
      "learning_rate": 7.932337910096961e-05,
      "loss": 0.3176,
      "step": 69
    },
    {
      "epoch": 0.3427172582619339,
      "grad_norm": 0.16557282209396362,
      "learning_rate": 7.866029958569956e-05,
      "loss": 0.4603,
      "step": 70
    },
    {
      "epoch": 0.34761321909424725,
      "grad_norm": 0.08522031456232071,
      "learning_rate": 7.798962632098024e-05,
      "loss": 0.5636,
      "step": 71
    },
    {
      "epoch": 0.3525091799265606,
      "grad_norm": 0.05583783611655235,
      "learning_rate": 7.73115370064452e-05,
      "loss": 0.2692,
      "step": 72
    },
    {
      "epoch": 0.3574051407588739,
      "grad_norm": 0.08181650191545486,
      "learning_rate": 7.6626211306663e-05,
      "loss": 0.6908,
      "step": 73
    },
    {
      "epoch": 0.3623011015911873,
      "grad_norm": 0.07728707045316696,
      "learning_rate": 7.59338308035337e-05,
      "loss": 0.4046,
      "step": 74
    },
    {
      "epoch": 0.3671970624235006,
      "grad_norm": 0.09214276075363159,
      "learning_rate": 7.523457894817745e-05,
      "loss": 0.816,
      "step": 75
    },
    {
      "epoch": 0.37209302325581395,
      "grad_norm": 0.07536876946687698,
      "learning_rate": 7.452864101232798e-05,
      "loss": 0.334,
      "step": 76
    },
    {
      "epoch": 0.3769889840881273,
      "grad_norm": 0.09890823811292648,
      "learning_rate": 7.381620403924333e-05,
      "loss": 0.4769,
      "step": 77
    },
    {
      "epoch": 0.3818849449204406,
      "grad_norm": 0.09237520396709442,
      "learning_rate": 7.30974567941475e-05,
      "loss": 0.4149,
      "step": 78
    },
    {
      "epoch": 0.386780905752754,
      "grad_norm": 0.14629222452640533,
      "learning_rate": 7.237258971421587e-05,
      "loss": 0.9113,
      "step": 79
    },
    {
      "epoch": 0.39167686658506734,
      "grad_norm": 0.0960475504398346,
      "learning_rate": 7.164179485811727e-05,
      "loss": 0.4643,
      "step": 80
    },
    {
      "epoch": 0.39657282741738065,
      "grad_norm": 0.07956342399120331,
      "learning_rate": 7.090526585512696e-05,
      "loss": 0.3902,
      "step": 81
    },
    {
      "epoch": 0.401468788249694,
      "grad_norm": 0.08580750226974487,
      "learning_rate": 7.016319785382296e-05,
      "loss": 0.5235,
      "step": 82
    },
    {
      "epoch": 0.40636474908200737,
      "grad_norm": 0.12661615014076233,
      "learning_rate": 6.941578747038023e-05,
      "loss": 0.4481,
      "step": 83
    },
    {
      "epoch": 0.4112607099143207,
      "grad_norm": 0.0649070218205452,
      "learning_rate": 6.866323273647563e-05,
      "loss": 0.4189,
      "step": 84
    },
    {
      "epoch": 0.41615667074663404,
      "grad_norm": 0.1020541861653328,
      "learning_rate": 6.79057330468182e-05,
      "loss": 0.4533,
      "step": 85
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 0.10127013921737671,
      "learning_rate": 6.7143489106318e-05,
      "loss": 0.522,
      "step": 86
    },
    {
      "epoch": 0.4259485924112607,
      "grad_norm": 0.14281871914863586,
      "learning_rate": 6.637670287690799e-05,
      "loss": 0.4772,
      "step": 87
    },
    {
      "epoch": 0.43084455324357407,
      "grad_norm": 0.065117247402668,
      "learning_rate": 6.560557752403277e-05,
      "loss": 0.5043,
      "step": 88
    },
    {
      "epoch": 0.4357405140758874,
      "grad_norm": 0.12292537093162537,
      "learning_rate": 6.483031736281843e-05,
      "loss": 0.4375,
      "step": 89
    },
    {
      "epoch": 0.44063647490820074,
      "grad_norm": 0.06215154007077217,
      "learning_rate": 6.40511278039378e-05,
      "loss": 0.3519,
      "step": 90
    },
    {
      "epoch": 0.4455324357405141,
      "grad_norm": 0.070677250623703,
      "learning_rate": 6.326821529918553e-05,
      "loss": 0.3407,
      "step": 91
    },
    {
      "epoch": 0.4504283965728274,
      "grad_norm": 0.06239693611860275,
      "learning_rate": 6.248178728677711e-05,
      "loss": 0.3799,
      "step": 92
    },
    {
      "epoch": 0.45532435740514077,
      "grad_norm": 0.07950470596551895,
      "learning_rate": 6.16920521363867e-05,
      "loss": 0.3063,
      "step": 93
    },
    {
      "epoch": 0.4602203182374541,
      "grad_norm": 0.06621692329645157,
      "learning_rate": 6.089921909393812e-05,
      "loss": 0.4386,
      "step": 94
    },
    {
      "epoch": 0.46511627906976744,
      "grad_norm": 0.3681151270866394,
      "learning_rate": 6.0103498226163603e-05,
      "loss": 0.5919,
      "step": 95
    },
    {
      "epoch": 0.4700122399020808,
      "grad_norm": 0.20958742499351501,
      "learning_rate": 5.93051003649452e-05,
      "loss": 0.4636,
      "step": 96
    },
    {
      "epoch": 0.4749082007343941,
      "grad_norm": 0.09456545114517212,
      "learning_rate": 5.850423705145334e-05,
      "loss": 0.4564,
      "step": 97
    },
    {
      "epoch": 0.47980416156670747,
      "grad_norm": 0.06202203407883644,
      "learning_rate": 5.770112048009747e-05,
      "loss": 0.3652,
      "step": 98
    },
    {
      "epoch": 0.4847001223990208,
      "grad_norm": 0.11682210117578506,
      "learning_rate": 5.68959634423037e-05,
      "loss": 0.5179,
      "step": 99
    },
    {
      "epoch": 0.48959608323133413,
      "grad_norm": 0.11552639305591583,
      "learning_rate": 5.60889792701342e-05,
      "loss": 0.467,
      "step": 100
    },
    {
      "epoch": 0.4944920440636475,
      "grad_norm": 0.053526621311903,
      "learning_rate": 5.52803817797633e-05,
      "loss": 0.2341,
      "step": 101
    },
    {
      "epoch": 0.49938800489596086,
      "grad_norm": 0.11124971508979797,
      "learning_rate": 5.4470385214825416e-05,
      "loss": 0.5641,
      "step": 102
    },
    {
      "epoch": 0.5042839657282742,
      "grad_norm": 0.1113283783197403,
      "learning_rate": 5.365920418964973e-05,
      "loss": 0.438,
      "step": 103
    },
    {
      "epoch": 0.5091799265605875,
      "grad_norm": 0.0814870297908783,
      "learning_rate": 5.28470536323965e-05,
      "loss": 0.4562,
      "step": 104
    },
    {
      "epoch": 0.5140758873929009,
      "grad_norm": 0.09781132638454437,
      "learning_rate": 5.2034148728110424e-05,
      "loss": 0.5587,
      "step": 105
    },
    {
      "epoch": 0.5189718482252142,
      "grad_norm": 0.11908842623233795,
      "learning_rate": 5.1220704861705774e-05,
      "loss": 0.7554,
      "step": 106
    },
    {
      "epoch": 0.5238678090575275,
      "grad_norm": 0.06143520399928093,
      "learning_rate": 5.0406937560898646e-05,
      "loss": 0.2544,
      "step": 107
    },
    {
      "epoch": 0.5287637698898409,
      "grad_norm": 0.08653257042169571,
      "learning_rate": 4.9593062439101365e-05,
      "loss": 0.5423,
      "step": 108
    },
    {
      "epoch": 0.5336597307221542,
      "grad_norm": 0.07373999804258347,
      "learning_rate": 4.877929513829424e-05,
      "loss": 0.3577,
      "step": 109
    },
    {
      "epoch": 0.5385556915544676,
      "grad_norm": 0.2530810236930847,
      "learning_rate": 4.796585127188958e-05,
      "loss": 0.454,
      "step": 110
    },
    {
      "epoch": 0.543451652386781,
      "grad_norm": 0.0999133363366127,
      "learning_rate": 4.715294636760352e-05,
      "loss": 0.4601,
      "step": 111
    },
    {
      "epoch": 0.5483476132190942,
      "grad_norm": 0.07089534401893616,
      "learning_rate": 4.634079581035029e-05,
      "loss": 0.2958,
      "step": 112
    },
    {
      "epoch": 0.5532435740514076,
      "grad_norm": 0.15452376008033752,
      "learning_rate": 4.55296147851746e-05,
      "loss": 0.5048,
      "step": 113
    },
    {
      "epoch": 0.5581395348837209,
      "grad_norm": 0.12181607633829117,
      "learning_rate": 4.471961822023671e-05,
      "loss": 0.5176,
      "step": 114
    },
    {
      "epoch": 0.5630354957160343,
      "grad_norm": 0.07680846750736237,
      "learning_rate": 4.391102072986581e-05,
      "loss": 0.2968,
      "step": 115
    },
    {
      "epoch": 0.5679314565483476,
      "grad_norm": 0.071733757853508,
      "learning_rate": 4.3104036557696295e-05,
      "loss": 0.6076,
      "step": 116
    },
    {
      "epoch": 0.572827417380661,
      "grad_norm": 0.07903078943490982,
      "learning_rate": 4.229887951990255e-05,
      "loss": 0.5459,
      "step": 117
    },
    {
      "epoch": 0.5777233782129743,
      "grad_norm": 0.09677067399024963,
      "learning_rate": 4.149576294854668e-05,
      "loss": 0.44,
      "step": 118
    },
    {
      "epoch": 0.5826193390452876,
      "grad_norm": 0.09369610249996185,
      "learning_rate": 4.069489963505482e-05,
      "loss": 0.6845,
      "step": 119
    },
    {
      "epoch": 0.587515299877601,
      "grad_norm": 0.06507845222949982,
      "learning_rate": 3.98965017738364e-05,
      "loss": 0.3054,
      "step": 120
    },
    {
      "epoch": 0.5924112607099143,
      "grad_norm": 0.1210411936044693,
      "learning_rate": 3.9100780906061896e-05,
      "loss": 0.4176,
      "step": 121
    },
    {
      "epoch": 0.5973072215422277,
      "grad_norm": 0.07214700430631638,
      "learning_rate": 3.83079478636133e-05,
      "loss": 0.4256,
      "step": 122
    },
    {
      "epoch": 0.602203182374541,
      "grad_norm": 0.07553218305110931,
      "learning_rate": 3.7518212713222906e-05,
      "loss": 0.5853,
      "step": 123
    },
    {
      "epoch": 0.6070991432068543,
      "grad_norm": 0.08783773332834244,
      "learning_rate": 3.673178470081448e-05,
      "loss": 0.5636,
      "step": 124
    },
    {
      "epoch": 0.6119951040391677,
      "grad_norm": 0.07783231139183044,
      "learning_rate": 3.594887219606221e-05,
      "loss": 0.6704,
      "step": 125
    },
    {
      "epoch": 0.616891064871481,
      "grad_norm": 0.14530447125434875,
      "learning_rate": 3.516968263718159e-05,
      "loss": 0.4881,
      "step": 126
    },
    {
      "epoch": 0.6217870257037944,
      "grad_norm": 0.23401491343975067,
      "learning_rate": 3.439442247596724e-05,
      "loss": 0.4918,
      "step": 127
    },
    {
      "epoch": 0.6266829865361077,
      "grad_norm": 0.07801464200019836,
      "learning_rate": 3.3623297123092006e-05,
      "loss": 0.5203,
      "step": 128
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.1613989621400833,
      "learning_rate": 3.285651089368202e-05,
      "loss": 0.7026,
      "step": 129
    },
    {
      "epoch": 0.6364749082007344,
      "grad_norm": 0.12846659123897552,
      "learning_rate": 3.209426695318182e-05,
      "loss": 0.4857,
      "step": 130
    },
    {
      "epoch": 0.6413708690330477,
      "grad_norm": 0.11735550314188004,
      "learning_rate": 3.133676726352438e-05,
      "loss": 0.542,
      "step": 131
    },
    {
      "epoch": 0.6462668298653611,
      "grad_norm": 0.09229713678359985,
      "learning_rate": 3.0584212529619775e-05,
      "loss": 0.4807,
      "step": 132
    },
    {
      "epoch": 0.6511627906976745,
      "grad_norm": 0.1247001513838768,
      "learning_rate": 2.9836802146177034e-05,
      "loss": 0.5123,
      "step": 133
    },
    {
      "epoch": 0.6560587515299877,
      "grad_norm": 0.12723222374916077,
      "learning_rate": 2.9094734144873036e-05,
      "loss": 0.7619,
      "step": 134
    },
    {
      "epoch": 0.6609547123623011,
      "grad_norm": 0.06785371154546738,
      "learning_rate": 2.835820514188273e-05,
      "loss": 0.488,
      "step": 135
    },
    {
      "epoch": 0.6658506731946144,
      "grad_norm": 0.07913102209568024,
      "learning_rate": 2.7627410285784163e-05,
      "loss": 0.5913,
      "step": 136
    },
    {
      "epoch": 0.6707466340269278,
      "grad_norm": 0.10038694739341736,
      "learning_rate": 2.6902543205852492e-05,
      "loss": 0.6774,
      "step": 137
    },
    {
      "epoch": 0.6756425948592412,
      "grad_norm": 0.06108963489532471,
      "learning_rate": 2.618379596075668e-05,
      "loss": 0.304,
      "step": 138
    },
    {
      "epoch": 0.6805385556915544,
      "grad_norm": 0.06344286352396011,
      "learning_rate": 2.5471358987672017e-05,
      "loss": 0.394,
      "step": 139
    },
    {
      "epoch": 0.6854345165238678,
      "grad_norm": 0.1912202686071396,
      "learning_rate": 2.476542105182254e-05,
      "loss": 0.4608,
      "step": 140
    },
    {
      "epoch": 0.6903304773561811,
      "grad_norm": 0.0918666198849678,
      "learning_rate": 2.4066169196466326e-05,
      "loss": 0.7695,
      "step": 141
    },
    {
      "epoch": 0.6952264381884945,
      "grad_norm": 0.09997723251581192,
      "learning_rate": 2.3373788693337024e-05,
      "loss": 0.5197,
      "step": 142
    },
    {
      "epoch": 0.7001223990208079,
      "grad_norm": 0.08384369313716888,
      "learning_rate": 2.268846299355481e-05,
      "loss": 0.4842,
      "step": 143
    },
    {
      "epoch": 0.7050183598531212,
      "grad_norm": 0.14654509723186493,
      "learning_rate": 2.2010373679019776e-05,
      "loss": 0.4083,
      "step": 144
    },
    {
      "epoch": 0.7099143206854345,
      "grad_norm": 0.23128315806388855,
      "learning_rate": 2.133970041430044e-05,
      "loss": 0.6992,
      "step": 145
    },
    {
      "epoch": 0.7148102815177478,
      "grad_norm": 0.09588706493377686,
      "learning_rate": 2.067662089903039e-05,
      "loss": 0.5621,
      "step": 146
    },
    {
      "epoch": 0.7197062423500612,
      "grad_norm": 0.10101126879453659,
      "learning_rate": 2.002131082082549e-05,
      "loss": 0.4135,
      "step": 147
    },
    {
      "epoch": 0.7246022031823746,
      "grad_norm": 0.15346215665340424,
      "learning_rate": 1.937394380873418e-05,
      "loss": 0.5933,
      "step": 148
    },
    {
      "epoch": 0.7294981640146879,
      "grad_norm": 0.07918453961610794,
      "learning_rate": 1.873469138723325e-05,
      "loss": 0.3081,
      "step": 149
    },
    {
      "epoch": 0.7343941248470012,
      "grad_norm": 0.0649547427892685,
      "learning_rate": 1.8103722930781247e-05,
      "loss": 0.407,
      "step": 150
    },
    {
      "epoch": 0.7392900856793145,
      "grad_norm": 0.07419539242982864,
      "learning_rate": 1.748120561894147e-05,
      "loss": 0.7313,
      "step": 151
    },
    {
      "epoch": 0.7441860465116279,
      "grad_norm": 0.09094121307134628,
      "learning_rate": 1.6867304392086575e-05,
      "loss": 0.3871,
      "step": 152
    },
    {
      "epoch": 0.7490820073439413,
      "grad_norm": 0.10387779027223587,
      "learning_rate": 1.6262181907696454e-05,
      "loss": 0.5026,
      "step": 153
    },
    {
      "epoch": 0.7539779681762546,
      "grad_norm": 0.22015878558158875,
      "learning_rate": 1.5665998497260958e-05,
      "loss": 0.4817,
      "step": 154
    },
    {
      "epoch": 0.758873929008568,
      "grad_norm": 0.06719034910202026,
      "learning_rate": 1.5078912123798961e-05,
      "loss": 0.5186,
      "step": 155
    },
    {
      "epoch": 0.7637698898408812,
      "grad_norm": 0.07599823921918869,
      "learning_rate": 1.4501078340004953e-05,
      "loss": 0.4275,
      "step": 156
    },
    {
      "epoch": 0.7686658506731946,
      "grad_norm": 0.1412460058927536,
      "learning_rate": 1.3932650247034218e-05,
      "loss": 0.626,
      "step": 157
    },
    {
      "epoch": 0.773561811505508,
      "grad_norm": 0.1420402228832245,
      "learning_rate": 1.337377845393763e-05,
      "loss": 0.5193,
      "step": 158
    },
    {
      "epoch": 0.7784577723378213,
      "grad_norm": 0.09665469825267792,
      "learning_rate": 1.2824611037756684e-05,
      "loss": 0.5848,
      "step": 159
    },
    {
      "epoch": 0.7833537331701347,
      "grad_norm": 0.06045060604810715,
      "learning_rate": 1.2285293504289447e-05,
      "loss": 0.2627,
      "step": 160
    },
    {
      "epoch": 0.7882496940024479,
      "grad_norm": 0.08824898302555084,
      "learning_rate": 1.1755968749537754e-05,
      "loss": 0.5949,
      "step": 161
    },
    {
      "epoch": 0.7931456548347613,
      "grad_norm": 0.13809214532375336,
      "learning_rate": 1.1236777021845956e-05,
      "loss": 0.7397,
      "step": 162
    },
    {
      "epoch": 0.7980416156670747,
      "grad_norm": 0.12224618345499039,
      "learning_rate": 1.0727855884741056e-05,
      "loss": 0.553,
      "step": 163
    },
    {
      "epoch": 0.802937576499388,
      "grad_norm": 0.07628399133682251,
      "learning_rate": 1.022934018048432e-05,
      "loss": 0.3447,
      "step": 164
    },
    {
      "epoch": 0.8078335373317014,
      "grad_norm": 0.06734273582696915,
      "learning_rate": 9.741361994343867e-06,
      "loss": 0.7473,
      "step": 165
    },
    {
      "epoch": 0.8127294981640147,
      "grad_norm": 0.0891406238079071,
      "learning_rate": 9.264050619597697e-06,
      "loss": 0.5008,
      "step": 166
    },
    {
      "epoch": 0.817625458996328,
      "grad_norm": 0.08535400032997131,
      "learning_rate": 8.797532523276542e-06,
      "loss": 0.4171,
      "step": 167
    },
    {
      "epoch": 0.8225214198286414,
      "grad_norm": 0.3191221058368683,
      "learning_rate": 8.341931312655582e-06,
      "loss": 0.7633,
      "step": 168
    },
    {
      "epoch": 0.8274173806609547,
      "grad_norm": 0.11288396269083023,
      "learning_rate": 7.897367702503756e-06,
      "loss": 0.4054,
      "step": 169
    },
    {
      "epoch": 0.8323133414932681,
      "grad_norm": 0.09110561013221741,
      "learning_rate": 7.463959483099547e-06,
      "loss": 0.4642,
      "step": 170
    },
    {
      "epoch": 0.8372093023255814,
      "grad_norm": 0.08345432579517365,
      "learning_rate": 7.041821489021639e-06,
      "loss": 0.3769,
      "step": 171
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.19082055985927582,
      "learning_rate": 6.631065568722633e-06,
      "loss": 0.4487,
      "step": 172
    },
    {
      "epoch": 0.847001223990208,
      "grad_norm": 0.06951060146093369,
      "learning_rate": 6.231800554894029e-06,
      "loss": 0.2915,
      "step": 173
    },
    {
      "epoch": 0.8518971848225214,
      "grad_norm": 0.08803869783878326,
      "learning_rate": 5.844132235630273e-06,
      "loss": 0.5065,
      "step": 174
    },
    {
      "epoch": 0.8567931456548348,
      "grad_norm": 0.06576603651046753,
      "learning_rate": 5.468163326399389e-06,
      "loss": 0.4307,
      "step": 175
    },
    {
      "epoch": 0.8616891064871481,
      "grad_norm": 0.11544258892536163,
      "learning_rate": 5.103993442827831e-06,
      "loss": 0.6356,
      "step": 176
    },
    {
      "epoch": 0.8665850673194615,
      "grad_norm": 0.08324428647756577,
      "learning_rate": 4.751719074306604e-06,
      "loss": 0.5426,
      "step": 177
    },
    {
      "epoch": 0.8714810281517748,
      "grad_norm": 0.0815030187368393,
      "learning_rate": 4.411433558425698e-06,
      "loss": 0.4278,
      "step": 178
    },
    {
      "epoch": 0.8763769889840881,
      "grad_norm": 0.06282901763916016,
      "learning_rate": 4.083227056243644e-06,
      "loss": 0.2847,
      "step": 179
    },
    {
      "epoch": 0.8812729498164015,
      "grad_norm": 0.07095268368721008,
      "learning_rate": 3.767186528398725e-06,
      "loss": 0.3923,
      "step": 180
    },
    {
      "epoch": 0.8861689106487148,
      "grad_norm": 0.09169545769691467,
      "learning_rate": 3.4633957120681293e-06,
      "loss": 0.5155,
      "step": 181
    },
    {
      "epoch": 0.8910648714810282,
      "grad_norm": 0.24409419298171997,
      "learning_rate": 3.1719350987811534e-06,
      "loss": 0.5456,
      "step": 182
    },
    {
      "epoch": 0.8959608323133414,
      "grad_norm": 0.08426636457443237,
      "learning_rate": 2.8928819130924657e-06,
      "loss": 0.5679,
      "step": 183
    },
    {
      "epoch": 0.9008567931456548,
      "grad_norm": 0.23968654870986938,
      "learning_rate": 2.6263100921208482e-06,
      "loss": 0.4841,
      "step": 184
    },
    {
      "epoch": 0.9057527539779682,
      "grad_norm": 0.10230088233947754,
      "learning_rate": 2.372290265959065e-06,
      "loss": 0.6307,
      "step": 185
    },
    {
      "epoch": 0.9106487148102815,
      "grad_norm": 0.07012014836072922,
      "learning_rate": 2.130889738959946e-06,
      "loss": 0.489,
      "step": 186
    },
    {
      "epoch": 0.9155446756425949,
      "grad_norm": 0.0840025320649147,
      "learning_rate": 1.9021724719035628e-06,
      "loss": 0.3811,
      "step": 187
    },
    {
      "epoch": 0.9204406364749081,
      "grad_norm": 0.06925945729017258,
      "learning_rate": 1.6861990650504255e-06,
      "loss": 0.3263,
      "step": 188
    },
    {
      "epoch": 0.9253365973072215,
      "grad_norm": 0.07820533215999603,
      "learning_rate": 1.4830267420849585e-06,
      "loss": 0.5176,
      "step": 189
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 0.10654879361391068,
      "learning_rate": 1.292709334953729e-06,
      "loss": 0.5375,
      "step": 190
    },
    {
      "epoch": 0.9351285189718482,
      "grad_norm": 0.06305018812417984,
      "learning_rate": 1.1152972696022445e-06,
      "loss": 0.4505,
      "step": 191
    },
    {
      "epoch": 0.9400244798041616,
      "grad_norm": 0.08257196098566055,
      "learning_rate": 9.508375526142976e-07,
      "loss": 0.4773,
      "step": 192
    },
    {
      "epoch": 0.944920440636475,
      "grad_norm": 0.0973181426525116,
      "learning_rate": 7.993737587571826e-07,
      "loss": 0.4163,
      "step": 193
    },
    {
      "epoch": 0.9498164014687882,
      "grad_norm": 0.1023239865899086,
      "learning_rate": 6.609460194362927e-07,
      "loss": 0.4597,
      "step": 194
    },
    {
      "epoch": 0.9547123623011016,
      "grad_norm": 0.09319756925106049,
      "learning_rate": 5.355910120620034e-07,
      "loss": 0.5937,
      "step": 195
    },
    {
      "epoch": 0.9596083231334149,
      "grad_norm": 0.07661579549312592,
      "learning_rate": 4.233419503317182e-07,
      "loss": 0.6014,
      "step": 196
    },
    {
      "epoch": 0.9645042839657283,
      "grad_norm": 0.07042757421731949,
      "learning_rate": 3.242285754296859e-07,
      "loss": 0.4437,
      "step": 197
    },
    {
      "epoch": 0.9694002447980417,
      "grad_norm": 0.07441776990890503,
      "learning_rate": 2.3827714814686486e-07,
      "loss": 0.2822,
      "step": 198
    },
    {
      "epoch": 0.9742962056303549,
      "grad_norm": 0.06588555127382278,
      "learning_rate": 1.655104419229281e-07,
      "loss": 0.3,
      "step": 199
    },
    {
      "epoch": 0.9791921664626683,
      "grad_norm": 0.06164183467626572,
      "learning_rate": 1.059477368122841e-07,
      "loss": 0.3109,
      "step": 200
    },
    {
      "epoch": 0.9840881272949816,
      "grad_norm": 0.13465183973312378,
      "learning_rate": 5.960481437568555e-08,
      "loss": 0.8621,
      "step": 201
    },
    {
      "epoch": 0.988984088127295,
      "grad_norm": 0.06679002195596695,
      "learning_rate": 2.649395349879069e-08,
      "loss": 0.5233,
      "step": 202
    },
    {
      "epoch": 0.9938800489596084,
      "grad_norm": 0.07267452776432037,
      "learning_rate": 6.623927138804664e-09,
      "loss": 0.59,
      "step": 203
    },
    {
      "epoch": 0.9987760097919217,
      "grad_norm": 0.07579752057790756,
      "learning_rate": 0.0,
      "loss": 0.4031,
      "step": 204
    },
    {
      "epoch": 0.9987760097919217,
      "step": 204,
      "total_flos": 1.7231028658783027e+17,
      "train_loss": 0.4889225305295458,
      "train_runtime": 5417.273,
      "train_samples_per_second": 0.151,
      "train_steps_per_second": 0.038
    }
  ],
  "logging_steps": 1,
  "max_steps": 204,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.7231028658783027e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}