{
  "best_metric": 0.9820159673690796,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.38077106139933364,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019038553069966682,
      "grad_norm": 23.7499942779541,
      "learning_rate": 7e-06,
      "loss": 7.21,
      "step": 1
    },
    {
      "epoch": 0.0019038553069966682,
      "eval_loss": 2.085144281387329,
      "eval_runtime": 64.4441,
      "eval_samples_per_second": 13.733,
      "eval_steps_per_second": 3.445,
      "step": 1
    },
    {
      "epoch": 0.0038077106139933364,
      "grad_norm": 27.487117767333984,
      "learning_rate": 1.4e-05,
      "loss": 7.3893,
      "step": 2
    },
    {
      "epoch": 0.005711565920990005,
      "grad_norm": 24.298641204833984,
      "learning_rate": 2.1e-05,
      "loss": 7.4764,
      "step": 3
    },
    {
      "epoch": 0.007615421227986673,
      "grad_norm": 20.819686889648438,
      "learning_rate": 2.8e-05,
      "loss": 7.2008,
      "step": 4
    },
    {
      "epoch": 0.00951927653498334,
      "grad_norm": 17.770313262939453,
      "learning_rate": 3.5e-05,
      "loss": 6.8417,
      "step": 5
    },
    {
      "epoch": 0.01142313184198001,
      "grad_norm": 17.92336654663086,
      "learning_rate": 4.2e-05,
      "loss": 6.5631,
      "step": 6
    },
    {
      "epoch": 0.013326987148976678,
      "grad_norm": 16.182708740234375,
      "learning_rate": 4.899999999999999e-05,
      "loss": 6.0062,
      "step": 7
    },
    {
      "epoch": 0.015230842455973346,
      "grad_norm": 14.703605651855469,
      "learning_rate": 5.6e-05,
      "loss": 6.5074,
      "step": 8
    },
    {
      "epoch": 0.017134697762970014,
      "grad_norm": 15.21464729309082,
      "learning_rate": 6.3e-05,
      "loss": 6.0345,
      "step": 9
    },
    {
      "epoch": 0.01903855306996668,
      "grad_norm": 13.005841255187988,
      "learning_rate": 7e-05,
      "loss": 5.738,
      "step": 10
    },
    {
      "epoch": 0.020942408376963352,
      "grad_norm": 13.063154220581055,
      "learning_rate": 6.999521567473641e-05,
      "loss": 5.8279,
      "step": 11
    },
    {
      "epoch": 0.02284626368396002,
      "grad_norm": 12.097558975219727,
      "learning_rate": 6.998086400693241e-05,
      "loss": 5.8253,
      "step": 12
    },
    {
      "epoch": 0.024750118990956686,
      "grad_norm": 11.619023323059082,
      "learning_rate": 6.995694892019065e-05,
      "loss": 5.7643,
      "step": 13
    },
    {
      "epoch": 0.026653974297953357,
      "grad_norm": 13.587701797485352,
      "learning_rate": 6.99234769526571e-05,
      "loss": 5.5242,
      "step": 14
    },
    {
      "epoch": 0.028557829604950024,
      "grad_norm": 11.041449546813965,
      "learning_rate": 6.988045725523343e-05,
      "loss": 5.3918,
      "step": 15
    },
    {
      "epoch": 0.03046168491194669,
      "grad_norm": 12.899657249450684,
      "learning_rate": 6.982790158907539e-05,
      "loss": 5.7544,
      "step": 16
    },
    {
      "epoch": 0.03236554021894336,
      "grad_norm": 13.852838516235352,
      "learning_rate": 6.976582432237733e-05,
      "loss": 5.3847,
      "step": 17
    },
    {
      "epoch": 0.03426939552594003,
      "grad_norm": 11.95140552520752,
      "learning_rate": 6.969424242644413e-05,
      "loss": 5.6636,
      "step": 18
    },
    {
      "epoch": 0.036173250832936696,
      "grad_norm": 11.032059669494629,
      "learning_rate": 6.961317547105138e-05,
      "loss": 5.0469,
      "step": 19
    },
    {
      "epoch": 0.03807710613993336,
      "grad_norm": 11.564652442932129,
      "learning_rate": 6.952264561909527e-05,
      "loss": 5.602,
      "step": 20
    },
    {
      "epoch": 0.03998096144693003,
      "grad_norm": 11.808911323547363,
      "learning_rate": 6.942267762053337e-05,
      "loss": 5.9484,
      "step": 21
    },
    {
      "epoch": 0.041884816753926704,
      "grad_norm": 10.792513847351074,
      "learning_rate": 6.931329880561832e-05,
      "loss": 5.3864,
      "step": 22
    },
    {
      "epoch": 0.04378867206092337,
      "grad_norm": 10.414179801940918,
      "learning_rate": 6.919453907742597e-05,
      "loss": 5.5784,
      "step": 23
    },
    {
      "epoch": 0.04569252736792004,
      "grad_norm": 10.658806800842285,
      "learning_rate": 6.90664309036802e-05,
      "loss": 5.2816,
      "step": 24
    },
    {
      "epoch": 0.047596382674916705,
      "grad_norm": 11.604804039001465,
      "learning_rate": 6.892900930787656e-05,
      "loss": 5.4807,
      "step": 25
    },
    {
      "epoch": 0.04950023798191337,
      "grad_norm": 10.047147750854492,
      "learning_rate": 6.87823118597072e-05,
      "loss": 5.3292,
      "step": 26
    },
    {
      "epoch": 0.05140409328891004,
      "grad_norm": 10.286588668823242,
      "learning_rate": 6.862637866478969e-05,
      "loss": 5.2082,
      "step": 27
    },
    {
      "epoch": 0.053307948595906714,
      "grad_norm": 10.427512168884277,
      "learning_rate": 6.846125235370252e-05,
      "loss": 5.3268,
      "step": 28
    },
    {
      "epoch": 0.05521180390290338,
      "grad_norm": 10.335875511169434,
      "learning_rate": 6.828697807033038e-05,
      "loss": 5.4725,
      "step": 29
    },
    {
      "epoch": 0.05711565920990005,
      "grad_norm": 10.073786735534668,
      "learning_rate": 6.81036034595222e-05,
      "loss": 5.3723,
      "step": 30
    },
    {
      "epoch": 0.059019514516896715,
      "grad_norm": 9.964614868164062,
      "learning_rate": 6.791117865406564e-05,
      "loss": 5.3471,
      "step": 31
    },
    {
      "epoch": 0.06092336982389338,
      "grad_norm": 9.527873992919922,
      "learning_rate": 6.770975626098112e-05,
      "loss": 5.1568,
      "step": 32
    },
    {
      "epoch": 0.06282722513089005,
      "grad_norm": 10.344468116760254,
      "learning_rate": 6.749939134713974e-05,
      "loss": 5.3934,
      "step": 33
    },
    {
      "epoch": 0.06473108043788672,
      "grad_norm": 10.566803932189941,
      "learning_rate": 6.728014142420846e-05,
      "loss": 5.5978,
      "step": 34
    },
    {
      "epoch": 0.06663493574488338,
      "grad_norm": 9.318648338317871,
      "learning_rate": 6.7052066432927e-05,
      "loss": 5.2326,
      "step": 35
    },
    {
      "epoch": 0.06853879105188006,
      "grad_norm": 10.439335823059082,
      "learning_rate": 6.681522872672069e-05,
      "loss": 5.4662,
      "step": 36
    },
    {
      "epoch": 0.07044264635887673,
      "grad_norm": 9.824296951293945,
      "learning_rate": 6.656969305465356e-05,
      "loss": 5.2866,
      "step": 37
    },
    {
      "epoch": 0.07234650166587339,
      "grad_norm": 9.726062774658203,
      "learning_rate": 6.631552654372672e-05,
      "loss": 5.2126,
      "step": 38
    },
    {
      "epoch": 0.07425035697287007,
      "grad_norm": 11.502147674560547,
      "learning_rate": 6.60527986805264e-05,
      "loss": 5.6166,
      "step": 39
    },
    {
      "epoch": 0.07615421227986673,
      "grad_norm": 10.295775413513184,
      "learning_rate": 6.578158129222711e-05,
      "loss": 5.5236,
      "step": 40
    },
    {
      "epoch": 0.0780580675868634,
      "grad_norm": 9.807427406311035,
      "learning_rate": 6.550194852695469e-05,
      "loss": 5.4277,
      "step": 41
    },
    {
      "epoch": 0.07996192289386006,
      "grad_norm": 10.789260864257812,
      "learning_rate": 6.521397683351509e-05,
      "loss": 5.74,
      "step": 42
    },
    {
      "epoch": 0.08186577820085673,
      "grad_norm": 10.294041633605957,
      "learning_rate": 6.491774494049386e-05,
      "loss": 5.4681,
      "step": 43
    },
    {
      "epoch": 0.08376963350785341,
      "grad_norm": 9.753371238708496,
      "learning_rate": 6.461333383473272e-05,
      "loss": 5.6462,
      "step": 44
    },
    {
      "epoch": 0.08567348881485007,
      "grad_norm": 9.903716087341309,
      "learning_rate": 6.430082673918849e-05,
      "loss": 5.2489,
      "step": 45
    },
    {
      "epoch": 0.08757734412184674,
      "grad_norm": 9.664334297180176,
      "learning_rate": 6.398030909018069e-05,
      "loss": 5.2939,
      "step": 46
    },
    {
      "epoch": 0.0894811994288434,
      "grad_norm": 10.391850471496582,
      "learning_rate": 6.365186851403423e-05,
      "loss": 5.8567,
      "step": 47
    },
    {
      "epoch": 0.09138505473584008,
      "grad_norm": 11.669763565063477,
      "learning_rate": 6.331559480312315e-05,
      "loss": 5.7577,
      "step": 48
    },
    {
      "epoch": 0.09328891004283675,
      "grad_norm": 9.692889213562012,
      "learning_rate": 6.297157989132236e-05,
      "loss": 5.4625,
      "step": 49
    },
    {
      "epoch": 0.09519276534983341,
      "grad_norm": 11.684773445129395,
      "learning_rate": 6.261991782887377e-05,
      "loss": 5.6681,
      "step": 50
    },
    {
      "epoch": 0.09519276534983341,
      "eval_loss": 1.3040165901184082,
      "eval_runtime": 65.4623,
      "eval_samples_per_second": 13.519,
      "eval_steps_per_second": 3.391,
      "step": 50
    },
    {
      "epoch": 0.09709662065683008,
      "grad_norm": 15.806927680969238,
      "learning_rate": 6.226070475667393e-05,
      "loss": 5.6676,
      "step": 51
    },
    {
      "epoch": 0.09900047596382675,
      "grad_norm": 11.198895454406738,
      "learning_rate": 6.189403887999006e-05,
      "loss": 5.4295,
      "step": 52
    },
    {
      "epoch": 0.10090433127082342,
      "grad_norm": 8.934762001037598,
      "learning_rate": 6.152002044161171e-05,
      "loss": 5.408,
      "step": 53
    },
    {
      "epoch": 0.10280818657782008,
      "grad_norm": 7.409307479858398,
      "learning_rate": 6.113875169444539e-05,
      "loss": 5.2086,
      "step": 54
    },
    {
      "epoch": 0.10471204188481675,
      "grad_norm": 8.708901405334473,
      "learning_rate": 6.0750336873559605e-05,
      "loss": 5.4806,
      "step": 55
    },
    {
      "epoch": 0.10661589719181343,
      "grad_norm": 8.244229316711426,
      "learning_rate": 6.035488216768811e-05,
      "loss": 5.1627,
      "step": 56
    },
    {
      "epoch": 0.10851975249881009,
      "grad_norm": 7.6382060050964355,
      "learning_rate": 5.9952495690198894e-05,
      "loss": 4.7832,
      "step": 57
    },
    {
      "epoch": 0.11042360780580676,
      "grad_norm": 7.794398784637451,
      "learning_rate": 5.954328744953709e-05,
      "loss": 4.759,
      "step": 58
    },
    {
      "epoch": 0.11232746311280342,
      "grad_norm": 8.159358978271484,
      "learning_rate": 5.91273693191498e-05,
      "loss": 4.8963,
      "step": 59
    },
    {
      "epoch": 0.1142313184198001,
      "grad_norm": 8.03155517578125,
      "learning_rate": 5.870485500690094e-05,
      "loss": 5.1858,
      "step": 60
    },
    {
      "epoch": 0.11613517372679677,
      "grad_norm": 9.014118194580078,
      "learning_rate": 5.827586002398468e-05,
      "loss": 5.2089,
      "step": 61
    },
    {
      "epoch": 0.11803902903379343,
      "grad_norm": 8.561113357543945,
      "learning_rate": 5.784050165334589e-05,
      "loss": 4.8377,
      "step": 62
    },
    {
      "epoch": 0.1199428843407901,
      "grad_norm": 9.359479904174805,
      "learning_rate": 5.739889891761608e-05,
      "loss": 4.8591,
      "step": 63
    },
    {
      "epoch": 0.12184673964778676,
      "grad_norm": 8.884666442871094,
      "learning_rate": 5.6951172546573794e-05,
      "loss": 4.8723,
      "step": 64
    },
    {
      "epoch": 0.12375059495478344,
      "grad_norm": 8.695745468139648,
      "learning_rate": 5.6497444944138376e-05,
      "loss": 5.0063,
      "step": 65
    },
    {
      "epoch": 0.1256544502617801,
      "grad_norm": 7.5747456550598145,
      "learning_rate": 5.603784015490587e-05,
      "loss": 4.7092,
      "step": 66
    },
    {
      "epoch": 0.1275583055687768,
      "grad_norm": 7.801674842834473,
      "learning_rate": 5.557248383023655e-05,
      "loss": 4.7957,
      "step": 67
    },
    {
      "epoch": 0.12946216087577345,
      "grad_norm": 8.105006217956543,
      "learning_rate": 5.510150319390302e-05,
      "loss": 4.4399,
      "step": 68
    },
    {
      "epoch": 0.1313660161827701,
      "grad_norm": 8.557613372802734,
      "learning_rate": 5.4625027007308546e-05,
      "loss": 4.885,
      "step": 69
    },
    {
      "epoch": 0.13326987148976677,
      "grad_norm": 11.274003028869629,
      "learning_rate": 5.414318553428494e-05,
      "loss": 4.8082,
      "step": 70
    },
    {
      "epoch": 0.13517372679676345,
      "grad_norm": 11.312886238098145,
      "learning_rate": 5.3656110505479776e-05,
      "loss": 4.7281,
      "step": 71
    },
    {
      "epoch": 0.13707758210376011,
      "grad_norm": 8.340498924255371,
      "learning_rate": 5.316393508234253e-05,
      "loss": 4.7314,
      "step": 72
    },
    {
      "epoch": 0.13898143741075678,
      "grad_norm": 8.157063484191895,
      "learning_rate": 5.266679382071953e-05,
      "loss": 4.9252,
      "step": 73
    },
    {
      "epoch": 0.14088529271775346,
      "grad_norm": 8.306255340576172,
      "learning_rate": 5.216482263406778e-05,
      "loss": 4.6616,
      "step": 74
    },
    {
      "epoch": 0.14278914802475012,
      "grad_norm": 8.908082962036133,
      "learning_rate": 5.1658158756297576e-05,
      "loss": 4.8742,
      "step": 75
    },
    {
      "epoch": 0.14469300333174678,
      "grad_norm": 7.921310901641846,
      "learning_rate": 5.114694070425407e-05,
      "loss": 4.4142,
      "step": 76
    },
    {
      "epoch": 0.14659685863874344,
      "grad_norm": 7.845428466796875,
      "learning_rate": 5.063130823984823e-05,
      "loss": 4.4547,
      "step": 77
    },
    {
      "epoch": 0.14850071394574013,
      "grad_norm": 8.892997741699219,
      "learning_rate": 5.011140233184724e-05,
      "loss": 5.108,
      "step": 78
    },
    {
      "epoch": 0.1504045692527368,
      "grad_norm": 9.606233596801758,
      "learning_rate": 4.958736511733516e-05,
      "loss": 4.6994,
      "step": 79
    },
    {
      "epoch": 0.15230842455973345,
      "grad_norm": 8.047012329101562,
      "learning_rate": 4.905933986285393e-05,
      "loss": 4.6938,
      "step": 80
    },
    {
      "epoch": 0.15421227986673014,
      "grad_norm": 9.193869590759277,
      "learning_rate": 4.8527470925235824e-05,
      "loss": 4.8014,
      "step": 81
    },
    {
      "epoch": 0.1561161351737268,
      "grad_norm": 7.83010196685791,
      "learning_rate": 4.799190371213772e-05,
      "loss": 4.6519,
      "step": 82
    },
    {
      "epoch": 0.15801999048072346,
      "grad_norm": 7.466574668884277,
      "learning_rate": 4.745278464228808e-05,
      "loss": 4.4398,
      "step": 83
    },
    {
      "epoch": 0.15992384578772012,
      "grad_norm": 8.107156753540039,
      "learning_rate": 4.69102611054575e-05,
      "loss": 4.6071,
      "step": 84
    },
    {
      "epoch": 0.1618277010947168,
      "grad_norm": 8.724653244018555,
      "learning_rate": 4.6364481422163926e-05,
      "loss": 4.9037,
      "step": 85
    },
    {
      "epoch": 0.16373155640171347,
      "grad_norm": 7.404033660888672,
      "learning_rate": 4.581559480312316e-05,
      "loss": 4.3696,
      "step": 86
    },
    {
      "epoch": 0.16563541170871013,
      "grad_norm": 8.051767349243164,
      "learning_rate": 4.526375130845627e-05,
      "loss": 4.5589,
      "step": 87
    },
    {
      "epoch": 0.16753926701570682,
      "grad_norm": 9.341611862182617,
      "learning_rate": 4.4709101806664554e-05,
      "loss": 4.7734,
      "step": 88
    },
    {
      "epoch": 0.16944312232270348,
      "grad_norm": 8.678064346313477,
      "learning_rate": 4.4151797933383685e-05,
      "loss": 4.6396,
      "step": 89
    },
    {
      "epoch": 0.17134697762970014,
      "grad_norm": 8.946483612060547,
      "learning_rate": 4.359199204992797e-05,
      "loss": 4.7698,
      "step": 90
    },
    {
      "epoch": 0.17325083293669682,
      "grad_norm": 8.687679290771484,
      "learning_rate": 4.30298372016363e-05,
      "loss": 4.7269,
      "step": 91
    },
    {
      "epoch": 0.17515468824369348,
      "grad_norm": 10.165061950683594,
      "learning_rate": 4.246548707603114e-05,
      "loss": 4.9481,
      "step": 92
    },
    {
      "epoch": 0.17705854355069015,
      "grad_norm": 8.327232360839844,
      "learning_rate": 4.1899095960801805e-05,
      "loss": 4.6007,
      "step": 93
    },
    {
      "epoch": 0.1789623988576868,
      "grad_norm": 8.452240943908691,
      "learning_rate": 4.133081870162385e-05,
      "loss": 4.6878,
      "step": 94
    },
    {
      "epoch": 0.1808662541646835,
      "grad_norm": 8.48222827911377,
      "learning_rate": 4.076081065982569e-05,
      "loss": 4.4604,
      "step": 95
    },
    {
      "epoch": 0.18277010947168015,
      "grad_norm": 9.326024055480957,
      "learning_rate": 4.018922766991447e-05,
      "loss": 4.7372,
      "step": 96
    },
    {
      "epoch": 0.1846739647786768,
      "grad_norm": 9.113911628723145,
      "learning_rate": 3.961622599697241e-05,
      "loss": 4.4883,
      "step": 97
    },
    {
      "epoch": 0.1865778200856735,
      "grad_norm": 8.056068420410156,
      "learning_rate": 3.9041962293935516e-05,
      "loss": 4.8074,
      "step": 98
    },
    {
      "epoch": 0.18848167539267016,
      "grad_norm": 8.778728485107422,
      "learning_rate": 3.84665935587662e-05,
      "loss": 4.4564,
      "step": 99
    },
    {
      "epoch": 0.19038553069966682,
      "grad_norm": 8.800410270690918,
      "learning_rate": 3.7890277091531636e-05,
      "loss": 4.6041,
      "step": 100
    },
    {
      "epoch": 0.19038553069966682,
      "eval_loss": 1.1414027214050293,
      "eval_runtime": 65.4902,
      "eval_samples_per_second": 13.513,
      "eval_steps_per_second": 3.39,
      "step": 100
    },
    {
      "epoch": 0.19228938600666348,
      "grad_norm": 8.70529556274414,
      "learning_rate": 3.7313170451399475e-05,
      "loss": 5.0098,
      "step": 101
    },
    {
      "epoch": 0.19419324131366017,
      "grad_norm": 8.006318092346191,
      "learning_rate": 3.673543141356278e-05,
      "loss": 5.0347,
      "step": 102
    },
    {
      "epoch": 0.19609709662065683,
      "grad_norm": 7.4283013343811035,
      "learning_rate": 3.6157217926105783e-05,
      "loss": 4.8268,
      "step": 103
    },
    {
      "epoch": 0.1980009519276535,
      "grad_norm": 6.7939605712890625,
      "learning_rate": 3.557868806682255e-05,
      "loss": 4.7183,
      "step": 104
    },
    {
      "epoch": 0.19990480723465018,
      "grad_norm": 6.694666862487793,
      "learning_rate": 3.5e-05,
      "loss": 4.6545,
      "step": 105
    },
    {
      "epoch": 0.20180866254164684,
      "grad_norm": 6.7515692710876465,
      "learning_rate": 3.442131193317745e-05,
      "loss": 4.5719,
      "step": 106
    },
    {
      "epoch": 0.2037125178486435,
      "grad_norm": 6.9047932624816895,
      "learning_rate": 3.384278207389421e-05,
      "loss": 4.3211,
      "step": 107
    },
    {
      "epoch": 0.20561637315564016,
      "grad_norm": 8.433969497680664,
      "learning_rate": 3.3264568586437216e-05,
      "loss": 4.414,
      "step": 108
    },
    {
      "epoch": 0.20752022846263685,
      "grad_norm": 6.5694050788879395,
      "learning_rate": 3.268682954860052e-05,
      "loss": 4.2368,
      "step": 109
    },
    {
      "epoch": 0.2094240837696335,
      "grad_norm": 7.584961891174316,
      "learning_rate": 3.210972290846837e-05,
      "loss": 4.6716,
      "step": 110
    },
    {
      "epoch": 0.21132793907663017,
      "grad_norm": 6.995484828948975,
      "learning_rate": 3.15334064412338e-05,
      "loss": 4.3203,
      "step": 111
    },
    {
      "epoch": 0.21323179438362685,
      "grad_norm": 7.1296467781066895,
      "learning_rate": 3.0958037706064485e-05,
      "loss": 4.5241,
      "step": 112
    },
    {
      "epoch": 0.21513564969062351,
      "grad_norm": 7.044524669647217,
      "learning_rate": 3.038377400302758e-05,
      "loss": 4.4863,
      "step": 113
    },
    {
      "epoch": 0.21703950499762018,
      "grad_norm": 7.043889045715332,
      "learning_rate": 2.9810772330085524e-05,
      "loss": 4.4081,
      "step": 114
    },
    {
      "epoch": 0.21894336030461684,
      "grad_norm": 8.038551330566406,
      "learning_rate": 2.9239189340174306e-05,
      "loss": 4.5455,
      "step": 115
    },
    {
      "epoch": 0.22084721561161352,
      "grad_norm": 7.743556499481201,
      "learning_rate": 2.8669181298376163e-05,
      "loss": 4.3879,
      "step": 116
    },
    {
      "epoch": 0.22275107091861018,
      "grad_norm": 7.689728260040283,
      "learning_rate": 2.8100904039198193e-05,
      "loss": 4.3637,
      "step": 117
    },
    {
      "epoch": 0.22465492622560684,
      "grad_norm": 7.642689228057861,
      "learning_rate": 2.7534512923968863e-05,
      "loss": 4.5594,
      "step": 118
    },
    {
      "epoch": 0.22655878153260353,
      "grad_norm": 8.036393165588379,
      "learning_rate": 2.6970162798363695e-05,
      "loss": 4.2413,
      "step": 119
    },
    {
      "epoch": 0.2284626368396002,
      "grad_norm": 7.012694358825684,
      "learning_rate": 2.640800795007203e-05,
      "loss": 4.2993,
      "step": 120
    },
    {
      "epoch": 0.23036649214659685,
      "grad_norm": 7.756348133087158,
      "learning_rate": 2.5848202066616305e-05,
      "loss": 4.1293,
      "step": 121
    },
    {
      "epoch": 0.23227034745359354,
      "grad_norm": 7.184790134429932,
      "learning_rate": 2.5290898193335446e-05,
      "loss": 4.0667,
      "step": 122
    },
    {
      "epoch": 0.2341742027605902,
      "grad_norm": 6.861097812652588,
      "learning_rate": 2.4736248691543736e-05,
      "loss": 3.9088,
      "step": 123
    },
    {
      "epoch": 0.23607805806758686,
      "grad_norm": 7.792126655578613,
      "learning_rate": 2.4184405196876842e-05,
      "loss": 4.0982,
      "step": 124
    },
    {
      "epoch": 0.23798191337458352,
      "grad_norm": 8.279322624206543,
      "learning_rate": 2.363551857783608e-05,
      "loss": 4.273,
      "step": 125
    },
    {
      "epoch": 0.2398857686815802,
      "grad_norm": 7.2576494216918945,
      "learning_rate": 2.308973889454249e-05,
      "loss": 4.133,
      "step": 126
    },
    {
      "epoch": 0.24178962398857687,
      "grad_norm": 8.2999906539917,
      "learning_rate": 2.2547215357711918e-05,
      "loss": 4.0224,
      "step": 127
    },
    {
      "epoch": 0.24369347929557353,
      "grad_norm": 8.445121765136719,
      "learning_rate": 2.2008096287862266e-05,
      "loss": 4.2589,
      "step": 128
    },
    {
      "epoch": 0.24559733460257022,
      "grad_norm": 7.673584938049316,
      "learning_rate": 2.1472529074764177e-05,
      "loss": 4.3057,
      "step": 129
    },
    {
      "epoch": 0.24750118990956688,
      "grad_norm": 7.786920547485352,
      "learning_rate": 2.0940660137146074e-05,
      "loss": 4.2528,
      "step": 130
    },
    {
      "epoch": 0.24940504521656354,
      "grad_norm": 7.61902379989624,
      "learning_rate": 2.041263488266484e-05,
      "loss": 3.7492,
      "step": 131
    },
    {
      "epoch": 0.2513089005235602,
      "grad_norm": 7.465391159057617,
      "learning_rate": 1.988859766815275e-05,
      "loss": 3.981,
      "step": 132
    },
    {
      "epoch": 0.25321275583055686,
      "grad_norm": 7.733811855316162,
      "learning_rate": 1.9368691760151773e-05,
      "loss": 4.1569,
      "step": 133
    },
    {
      "epoch": 0.2551166111375536,
      "grad_norm": 7.370545387268066,
      "learning_rate": 1.885305929574593e-05,
      "loss": 3.874,
      "step": 134
    },
    {
      "epoch": 0.25702046644455023,
      "grad_norm": 8.920854568481445,
      "learning_rate": 1.8341841243702424e-05,
      "loss": 4.3121,
      "step": 135
    },
    {
      "epoch": 0.2589243217515469,
      "grad_norm": 9.357240676879883,
      "learning_rate": 1.7835177365932225e-05,
      "loss": 4.524,
      "step": 136
    },
    {
      "epoch": 0.26082817705854355,
      "grad_norm": 8.812352180480957,
      "learning_rate": 1.7333206179280478e-05,
      "loss": 3.9994,
      "step": 137
    },
    {
      "epoch": 0.2627320323655402,
      "grad_norm": 8.463519096374512,
      "learning_rate": 1.6836064917657478e-05,
      "loss": 4.0683,
      "step": 138
    },
    {
      "epoch": 0.2646358876725369,
      "grad_norm": 7.832219123840332,
      "learning_rate": 1.6343889494520224e-05,
      "loss": 3.9524,
      "step": 139
    },
    {
      "epoch": 0.26653974297953353,
      "grad_norm": 8.509306907653809,
      "learning_rate": 1.5856814465715064e-05,
      "loss": 3.9898,
      "step": 140
    },
    {
      "epoch": 0.26844359828653025,
      "grad_norm": 8.911566734313965,
      "learning_rate": 1.5374972992691458e-05,
      "loss": 3.9925,
      "step": 141
    },
    {
      "epoch": 0.2703474535935269,
      "grad_norm": 9.420488357543945,
      "learning_rate": 1.4898496806096974e-05,
      "loss": 4.1377,
      "step": 142
    },
    {
      "epoch": 0.27225130890052357,
      "grad_norm": 8.05871868133545,
      "learning_rate": 1.4427516169763444e-05,
      "loss": 3.8602,
      "step": 143
    },
    {
      "epoch": 0.27415516420752023,
      "grad_norm": 8.699991226196289,
      "learning_rate": 1.396215984509412e-05,
      "loss": 4.0414,
      "step": 144
    },
    {
      "epoch": 0.2760590195145169,
      "grad_norm": 8.904756546020508,
      "learning_rate": 1.3502555055861625e-05,
      "loss": 4.4121,
      "step": 145
    },
    {
      "epoch": 0.27796287482151355,
      "grad_norm": 10.7162504196167,
      "learning_rate": 1.3048827453426203e-05,
      "loss": 4.5674,
      "step": 146
    },
    {
      "epoch": 0.2798667301285102,
      "grad_norm": 10.042287826538086,
      "learning_rate": 1.2601101082383917e-05,
      "loss": 4.6137,
      "step": 147
    },
    {
      "epoch": 0.2817705854355069,
      "grad_norm": 9.6063871383667,
      "learning_rate": 1.2159498346654094e-05,
      "loss": 4.24,
      "step": 148
    },
    {
      "epoch": 0.2836744407425036,
      "grad_norm": 11.02759838104248,
      "learning_rate": 1.1724139976015306e-05,
      "loss": 4.5189,
      "step": 149
    },
    {
      "epoch": 0.28557829604950025,
      "grad_norm": 9.639123916625977,
      "learning_rate": 1.1295144993099068e-05,
      "loss": 4.4337,
      "step": 150
    },
    {
      "epoch": 0.28557829604950025,
      "eval_loss": 1.0172055959701538,
      "eval_runtime": 65.4679,
      "eval_samples_per_second": 13.518,
      "eval_steps_per_second": 3.391,
      "step": 150
    },
    {
      "epoch": 0.2874821513564969,
      "grad_norm": 7.883493900299072,
      "learning_rate": 1.0872630680850196e-05,
      "loss": 4.6472,
      "step": 151
    },
    {
      "epoch": 0.28938600666349357,
      "grad_norm": 8.46251106262207,
      "learning_rate": 1.0456712550462898e-05,
      "loss": 4.3637,
      "step": 152
    },
    {
      "epoch": 0.2912898619704902,
      "grad_norm": 8.094184875488281,
      "learning_rate": 1.0047504309801104e-05,
      "loss": 4.2769,
      "step": 153
    },
    {
      "epoch": 0.2931937172774869,
      "grad_norm": 8.725149154663086,
      "learning_rate": 9.645117832311886e-06,
      "loss": 4.4069,
      "step": 154
    },
    {
      "epoch": 0.2950975725844836,
      "grad_norm": 7.509702205657959,
      "learning_rate": 9.249663126440394e-06,
      "loss": 3.7436,
      "step": 155
    },
    {
      "epoch": 0.29700142789148026,
      "grad_norm": 8.107771873474121,
      "learning_rate": 8.861248305554624e-06,
      "loss": 4.3415,
      "step": 156
    },
    {
      "epoch": 0.2989052831984769,
      "grad_norm": 7.017758369445801,
      "learning_rate": 8.47997955838829e-06,
      "loss": 4.0017,
      "step": 157
    },
    {
      "epoch": 0.3008091385054736,
      "grad_norm": 7.199747562408447,
      "learning_rate": 8.10596112000994e-06,
      "loss": 4.1551,
      "step": 158
    },
    {
      "epoch": 0.30271299381247024,
      "grad_norm": 6.826944351196289,
      "learning_rate": 7.739295243326067e-06,
      "loss": 3.8882,
      "step": 159
    },
    {
      "epoch": 0.3046168491194669,
      "grad_norm": 7.0702972412109375,
      "learning_rate": 7.380082171126228e-06,
      "loss": 4.1228,
      "step": 160
    },
    {
      "epoch": 0.30652070442646356,
      "grad_norm": 8.110949516296387,
      "learning_rate": 7.028420108677635e-06,
      "loss": 4.2441,
      "step": 161
    },
    {
      "epoch": 0.3084245597334603,
      "grad_norm": 6.716869354248047,
      "learning_rate": 6.684405196876842e-06,
      "loss": 3.8069,
      "step": 162
    },
    {
      "epoch": 0.31032841504045694,
      "grad_norm": 7.542235374450684,
      "learning_rate": 6.3481314859657675e-06,
      "loss": 4.1167,
      "step": 163
    },
    {
      "epoch": 0.3122322703474536,
      "grad_norm": 7.276134014129639,
      "learning_rate": 6.019690909819298e-06,
      "loss": 3.9686,
      "step": 164
    },
    {
      "epoch": 0.31413612565445026,
      "grad_norm": 7.606207370758057,
      "learning_rate": 5.6991732608115e-06,
      "loss": 3.8366,
      "step": 165
    },
    {
      "epoch": 0.3160399809614469,
      "grad_norm": 7.363119125366211,
      "learning_rate": 5.386666165267256e-06,
      "loss": 3.9421,
      "step": 166
    },
    {
      "epoch": 0.3179438362684436,
      "grad_norm": 7.743343830108643,
      "learning_rate": 5.08225505950613e-06,
      "loss": 4.0282,
      "step": 167
    },
    {
      "epoch": 0.31984769157544024,
      "grad_norm": 7.004614353179932,
      "learning_rate": 4.786023166484913e-06,
      "loss": 3.921,
      "step": 168
    },
    {
      "epoch": 0.32175154688243696,
      "grad_norm": 7.168651103973389,
      "learning_rate": 4.498051473045291e-06,
      "loss": 3.9259,
      "step": 169
    },
    {
      "epoch": 0.3236554021894336,
      "grad_norm": 7.944221496582031,
      "learning_rate": 4.218418707772886e-06,
      "loss": 3.9505,
      "step": 170
    },
    {
      "epoch": 0.3255592574964303,
      "grad_norm": 7.535454273223877,
      "learning_rate": 3.947201319473587e-06,
      "loss": 3.976,
      "step": 171
    },
    {
      "epoch": 0.32746311280342694,
      "grad_norm": 7.3209075927734375,
      "learning_rate": 3.684473456273278e-06,
      "loss": 3.9647,
      "step": 172
    },
    {
      "epoch": 0.3293669681104236,
      "grad_norm": 7.745491981506348,
      "learning_rate": 3.4303069453464383e-06,
      "loss": 3.6598,
      "step": 173
    },
    {
      "epoch": 0.33127082341742026,
      "grad_norm": 7.710866451263428,
      "learning_rate": 3.184771273279312e-06,
      "loss": 4.0528,
      "step": 174
    },
    {
      "epoch": 0.3331746787244169,
      "grad_norm": 7.628995895385742,
      "learning_rate": 2.947933567072987e-06,
      "loss": 4.0274,
      "step": 175
    },
    {
      "epoch": 0.33507853403141363,
      "grad_norm": 8.762227058410645,
      "learning_rate": 2.719858575791534e-06,
      "loss": 4.0017,
      "step": 176
    },
    {
      "epoch": 0.3369823893384103,
      "grad_norm": 7.491204738616943,
      "learning_rate": 2.500608652860256e-06,
      "loss": 4.0085,
      "step": 177
    },
    {
      "epoch": 0.33888624464540695,
      "grad_norm": 7.047388076782227,
      "learning_rate": 2.2902437390188737e-06,
      "loss": 3.5474,
      "step": 178
    },
    {
      "epoch": 0.3407900999524036,
      "grad_norm": 7.473215579986572,
      "learning_rate": 2.0888213459343587e-06,
      "loss": 3.9556,
      "step": 179
    },
    {
      "epoch": 0.3426939552594003,
      "grad_norm": 8.065407752990723,
      "learning_rate": 1.8963965404777875e-06,
      "loss": 3.7152,
      "step": 180
    },
    {
      "epoch": 0.34459781056639693,
      "grad_norm": 7.622908592224121,
      "learning_rate": 1.7130219296696263e-06,
      "loss": 3.9459,
      "step": 181
    },
    {
      "epoch": 0.34650166587339365,
      "grad_norm": 7.5590128898620605,
      "learning_rate": 1.5387476462974824e-06,
      "loss": 4.0831,
      "step": 182
    },
    {
      "epoch": 0.3484055211803903,
      "grad_norm": 7.376468658447266,
      "learning_rate": 1.3736213352103147e-06,
      "loss": 3.6449,
      "step": 183
    },
    {
      "epoch": 0.35030937648738697,
      "grad_norm": 8.028775215148926,
      "learning_rate": 1.2176881402928002e-06,
      "loss": 3.8625,
      "step": 184
    },
    {
      "epoch": 0.35221323179438363,
      "grad_norm": 8.449442863464355,
      "learning_rate": 1.0709906921234367e-06,
      "loss": 4.0228,
      "step": 185
    },
    {
      "epoch": 0.3541170871013803,
      "grad_norm": 7.713665008544922,
      "learning_rate": 9.33569096319799e-07,
      "loss": 3.9626,
      "step": 186
    },
    {
      "epoch": 0.35602094240837695,
      "grad_norm": 8.763521194458008,
      "learning_rate": 8.054609225740255e-07,
      "loss": 4.0449,
      "step": 187
    },
    {
      "epoch": 0.3579247977153736,
      "grad_norm": 7.908934593200684,
      "learning_rate": 6.867011943816724e-07,
      "loss": 3.8442,
      "step": 188
    },
    {
      "epoch": 0.3598286530223703,
      "grad_norm": 9.312561988830566,
      "learning_rate": 5.77322379466617e-07,
      "loss": 4.4175,
      "step": 189
    },
    {
      "epoch": 0.361732508329367,
      "grad_norm": 8.53190803527832,
      "learning_rate": 4.773543809047186e-07,
      "loss": 4.0274,
      "step": 190
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 7.50021505355835,
      "learning_rate": 3.868245289486027e-07,
      "loss": 3.4893,
      "step": 191
    },
    {
      "epoch": 0.3655402189433603,
      "grad_norm": 8.497869491577148,
      "learning_rate": 3.0575757355586817e-07,
      "loss": 4.0881,
      "step": 192
    },
    {
      "epoch": 0.36744407425035697,
      "grad_norm": 7.563754081726074,
      "learning_rate": 2.3417567762266497e-07,
      "loss": 3.9095,
      "step": 193
    },
    {
      "epoch": 0.3693479295573536,
      "grad_norm": 8.050283432006836,
      "learning_rate": 1.7209841092460043e-07,
      "loss": 3.9385,
      "step": 194
    },
    {
      "epoch": 0.3712517848643503,
      "grad_norm": 8.874429702758789,
      "learning_rate": 1.1954274476655534e-07,
      "loss": 4.371,
      "step": 195
    },
    {
      "epoch": 0.373155640171347,
      "grad_norm": 9.123085021972656,
      "learning_rate": 7.652304734289127e-08,
      "loss": 4.1202,
      "step": 196
    },
    {
      "epoch": 0.37505949547834366,
      "grad_norm": 9.050190925598145,
      "learning_rate": 4.30510798093342e-08,
      "loss": 4.452,
      "step": 197
    },
    {
      "epoch": 0.3769633507853403,
      "grad_norm": 10.816521644592285,
      "learning_rate": 1.9135993067588284e-08,
      "loss": 4.5865,
      "step": 198
    },
    {
      "epoch": 0.378867206092337,
      "grad_norm": 10.921710968017578,
      "learning_rate": 4.784325263584854e-09,
      "loss": 4.5009,
      "step": 199
    },
    {
      "epoch": 0.38077106139933364,
      "grad_norm": 12.262462615966797,
      "learning_rate": 0.0,
      "loss": 4.617,
      "step": 200
    },
    {
      "epoch": 0.38077106139933364,
      "eval_loss": 0.9820159673690796,
      "eval_runtime": 65.4623,
      "eval_samples_per_second": 13.519,
      "eval_steps_per_second": 3.391,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.861999239200768e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}