{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.18066847335140018,
  "eval_steps": 500,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0018066847335140017,
      "grad_norm": 2.834148994241387,
      "learning_rate": 0.0,
      "loss": 0.8251,
      "step": 1
    },
    {
      "epoch": 0.0036133694670280035,
      "grad_norm": 2.8814188855606915,
      "learning_rate": 3.5714285714285716e-07,
      "loss": 0.8284,
      "step": 2
    },
    {
      "epoch": 0.005420054200542005,
      "grad_norm": 2.8877082225775945,
      "learning_rate": 7.142857142857143e-07,
      "loss": 0.8432,
      "step": 3
    },
    {
      "epoch": 0.007226738934056007,
      "grad_norm": 2.84706824634251,
      "learning_rate": 1.0714285714285714e-06,
      "loss": 0.8395,
      "step": 4
    },
    {
      "epoch": 0.009033423667570008,
      "grad_norm": 2.763620649396407,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 0.8291,
      "step": 5
    },
    {
      "epoch": 0.01084010840108401,
      "grad_norm": 2.7250326443499935,
      "learning_rate": 1.7857142857142859e-06,
      "loss": 0.8289,
      "step": 6
    },
    {
      "epoch": 0.012646793134598013,
      "grad_norm": 2.5921911890865355,
      "learning_rate": 2.1428571428571427e-06,
      "loss": 0.8081,
      "step": 7
    },
    {
      "epoch": 0.014453477868112014,
      "grad_norm": 2.5976860246328948,
      "learning_rate": 2.5e-06,
      "loss": 0.8152,
      "step": 8
    },
    {
      "epoch": 0.016260162601626018,
      "grad_norm": 2.1600249112189367,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 0.8041,
      "step": 9
    },
    {
      "epoch": 0.018066847335140017,
      "grad_norm": 2.05676893822907,
      "learning_rate": 3.2142857142857147e-06,
      "loss": 0.7978,
      "step": 10
    },
    {
      "epoch": 0.01987353206865402,
      "grad_norm": 1.5227088422406245,
      "learning_rate": 3.5714285714285718e-06,
      "loss": 0.7766,
      "step": 11
    },
    {
      "epoch": 0.02168021680216802,
      "grad_norm": 1.4305134864471831,
      "learning_rate": 3.928571428571429e-06,
      "loss": 0.7865,
      "step": 12
    },
    {
      "epoch": 0.023486901535682024,
      "grad_norm": 1.3403916222492462,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 0.7734,
      "step": 13
    },
    {
      "epoch": 0.025293586269196026,
      "grad_norm": 1.2438356291232484,
      "learning_rate": 4.642857142857144e-06,
      "loss": 0.7668,
      "step": 14
    },
    {
      "epoch": 0.02710027100271003,
      "grad_norm": 1.4470043220956499,
      "learning_rate": 5e-06,
      "loss": 0.7518,
      "step": 15
    },
    {
      "epoch": 0.028906955736224028,
      "grad_norm": 1.6508105965347686,
      "learning_rate": 5.357142857142857e-06,
      "loss": 0.7428,
      "step": 16
    },
    {
      "epoch": 0.03071364046973803,
      "grad_norm": 1.8197388085593902,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.7504,
      "step": 17
    },
    {
      "epoch": 0.032520325203252036,
      "grad_norm": 1.6628024493622566,
      "learning_rate": 6.071428571428571e-06,
      "loss": 0.7406,
      "step": 18
    },
    {
      "epoch": 0.03432700993676603,
      "grad_norm": 1.3996342649224327,
      "learning_rate": 6.4285714285714295e-06,
      "loss": 0.7317,
      "step": 19
    },
    {
      "epoch": 0.036133694670280034,
      "grad_norm": 0.8529424931984829,
      "learning_rate": 6.785714285714287e-06,
      "loss": 0.7029,
      "step": 20
    },
    {
      "epoch": 0.037940379403794036,
      "grad_norm": 1.012307815845279,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 0.7052,
      "step": 21
    },
    {
      "epoch": 0.03974706413730804,
      "grad_norm": 0.996083810328283,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.6937,
      "step": 22
    },
    {
      "epoch": 0.04155374887082204,
      "grad_norm": 0.9027655260544101,
      "learning_rate": 7.857142857142858e-06,
      "loss": 0.691,
      "step": 23
    },
    {
      "epoch": 0.04336043360433604,
      "grad_norm": 1.9068544177754214,
      "learning_rate": 8.214285714285714e-06,
      "loss": 0.697,
      "step": 24
    },
    {
      "epoch": 0.045167118337850046,
      "grad_norm": 0.7710305137202722,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.677,
      "step": 25
    },
    {
      "epoch": 0.04697380307136405,
      "grad_norm": 0.6673976579110235,
      "learning_rate": 8.92857142857143e-06,
      "loss": 0.6911,
      "step": 26
    },
    {
      "epoch": 0.04878048780487805,
      "grad_norm": 0.6017238871929963,
      "learning_rate": 9.285714285714288e-06,
      "loss": 0.667,
      "step": 27
    },
    {
      "epoch": 0.05058717253839205,
      "grad_norm": 0.6176897562760387,
      "learning_rate": 9.642857142857144e-06,
      "loss": 0.6685,
      "step": 28
    },
    {
      "epoch": 0.052393857271906055,
      "grad_norm": 0.5460140411641373,
      "learning_rate": 1e-05,
      "loss": 0.6788,
      "step": 29
    },
    {
      "epoch": 0.05420054200542006,
      "grad_norm": 0.5024441002282133,
      "learning_rate": 9.999910480045805e-06,
      "loss": 0.6776,
      "step": 30
    },
    {
      "epoch": 0.05600722673893405,
      "grad_norm": 0.5327568235598038,
      "learning_rate": 9.999641923388745e-06,
      "loss": 0.666,
      "step": 31
    },
    {
      "epoch": 0.057813911472448055,
      "grad_norm": 0.45860832293770903,
      "learning_rate": 9.999194339645292e-06,
      "loss": 0.6537,
      "step": 32
    },
    {
      "epoch": 0.05962059620596206,
      "grad_norm": 0.462200098378075,
      "learning_rate": 9.998567744842518e-06,
      "loss": 0.6639,
      "step": 33
    },
    {
      "epoch": 0.06142728093947606,
      "grad_norm": 0.44108437546829815,
      "learning_rate": 9.997762161417517e-06,
      "loss": 0.6507,
      "step": 34
    },
    {
      "epoch": 0.06323396567299007,
      "grad_norm": 0.4056783549136984,
      "learning_rate": 9.996777618216608e-06,
      "loss": 0.6559,
      "step": 35
    },
    {
      "epoch": 0.06504065040650407,
      "grad_norm": 0.3860777271649062,
      "learning_rate": 9.995614150494293e-06,
      "loss": 0.6503,
      "step": 36
    },
    {
      "epoch": 0.06684733514001806,
      "grad_norm": 0.434533435588928,
      "learning_rate": 9.994271799912004e-06,
      "loss": 0.6541,
      "step": 37
    },
    {
      "epoch": 0.06865401987353206,
      "grad_norm": 0.4371779969673632,
      "learning_rate": 9.992750614536606e-06,
      "loss": 0.6469,
      "step": 38
    },
    {
      "epoch": 0.07046070460704607,
      "grad_norm": 0.36457045763018364,
      "learning_rate": 9.991050648838676e-06,
      "loss": 0.6475,
      "step": 39
    },
    {
      "epoch": 0.07226738934056007,
      "grad_norm": 0.322635334253873,
      "learning_rate": 9.989171963690556e-06,
      "loss": 0.6366,
      "step": 40
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 0.35059303076021425,
      "learning_rate": 9.987114626364172e-06,
      "loss": 0.6431,
      "step": 41
    },
    {
      "epoch": 0.07588075880758807,
      "grad_norm": 0.36612621878509766,
      "learning_rate": 9.984878710528615e-06,
      "loss": 0.6339,
      "step": 42
    },
    {
      "epoch": 0.07768744354110207,
      "grad_norm": 0.3365894359412745,
      "learning_rate": 9.982464296247523e-06,
      "loss": 0.6343,
      "step": 43
    },
    {
      "epoch": 0.07949412827461608,
      "grad_norm": 0.3035590675895417,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.6275,
      "step": 44
    },
    {
      "epoch": 0.08130081300813008,
      "grad_norm": 0.2858077116353655,
      "learning_rate": 9.97710032455851e-06,
      "loss": 0.6344,
      "step": 45
    },
    {
      "epoch": 0.08310749774164408,
      "grad_norm": 0.2752507660143639,
      "learning_rate": 9.974150959223591e-06,
      "loss": 0.6362,
      "step": 46
    },
    {
      "epoch": 0.08491418247515808,
      "grad_norm": 0.3109343353140871,
      "learning_rate": 9.971023479582258e-06,
      "loss": 0.6389,
      "step": 47
    },
    {
      "epoch": 0.08672086720867209,
      "grad_norm": 0.27841301256018586,
      "learning_rate": 9.967717997623245e-06,
      "loss": 0.6256,
      "step": 48
    },
    {
      "epoch": 0.08852755194218609,
      "grad_norm": 0.3002752587221535,
      "learning_rate": 9.964234631709188e-06,
      "loss": 0.6316,
      "step": 49
    },
    {
      "epoch": 0.09033423667570009,
      "grad_norm": 0.24995397436718791,
      "learning_rate": 9.960573506572391e-06,
      "loss": 0.6303,
      "step": 50
    },
    {
      "epoch": 0.0921409214092141,
      "grad_norm": 0.2592033541282412,
      "learning_rate": 9.956734753310355e-06,
      "loss": 0.6199,
      "step": 51
    },
    {
      "epoch": 0.0939476061427281,
      "grad_norm": 0.26267469782137715,
      "learning_rate": 9.952718509381086e-06,
      "loss": 0.6381,
      "step": 52
    },
    {
      "epoch": 0.0957542908762421,
      "grad_norm": 0.2841156370165673,
      "learning_rate": 9.948524918598175e-06,
      "loss": 0.6223,
      "step": 53
    },
    {
      "epoch": 0.0975609756097561,
      "grad_norm": 0.2602382902911376,
      "learning_rate": 9.944154131125643e-06,
      "loss": 0.613,
      "step": 54
    },
    {
      "epoch": 0.0993676603432701,
      "grad_norm": 0.2548980055658011,
      "learning_rate": 9.93960630347257e-06,
      "loss": 0.6265,
      "step": 55
    },
    {
      "epoch": 0.1011743450767841,
      "grad_norm": 0.23755160689461077,
      "learning_rate": 9.934881598487478e-06,
      "loss": 0.6318,
      "step": 56
    },
    {
      "epoch": 0.10298102981029811,
      "grad_norm": 0.24890312911017654,
      "learning_rate": 9.929980185352525e-06,
      "loss": 0.6175,
      "step": 57
    },
    {
      "epoch": 0.10478771454381211,
      "grad_norm": 0.2773541758032373,
      "learning_rate": 9.924902239577419e-06,
      "loss": 0.6253,
      "step": 58
    },
    {
      "epoch": 0.10659439927732611,
      "grad_norm": 0.25242119733836627,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.611,
      "step": 59
    },
    {
      "epoch": 0.10840108401084012,
      "grad_norm": 0.2684984552226452,
      "learning_rate": 9.914217483745472e-06,
      "loss": 0.6119,
      "step": 60
    },
    {
      "epoch": 0.1102077687443541,
      "grad_norm": 0.2574590656590093,
      "learning_rate": 9.90861105628817e-06,
      "loss": 0.6159,
      "step": 61
    },
    {
      "epoch": 0.1120144534778681,
      "grad_norm": 0.25603682816247697,
      "learning_rate": 9.902828861376101e-06,
      "loss": 0.621,
      "step": 62
    },
    {
      "epoch": 0.11382113821138211,
      "grad_norm": 0.24731176189318566,
      "learning_rate": 9.896871106057989e-06,
      "loss": 0.6205,
      "step": 63
    },
    {
      "epoch": 0.11562782294489611,
      "grad_norm": 0.27598163635496337,
      "learning_rate": 9.890738003669029e-06,
      "loss": 0.6188,
      "step": 64
    },
    {
      "epoch": 0.11743450767841011,
      "grad_norm": 0.26447533420584596,
      "learning_rate": 9.884429773823238e-06,
      "loss": 0.6134,
      "step": 65
    },
    {
      "epoch": 0.11924119241192412,
      "grad_norm": 0.2764756892440485,
      "learning_rate": 9.877946642405598e-06,
      "loss": 0.6153,
      "step": 66
    },
    {
      "epoch": 0.12104787714543812,
      "grad_norm": 0.2878202327908946,
      "learning_rate": 9.871288841563956e-06,
      "loss": 0.6057,
      "step": 67
    },
    {
      "epoch": 0.12285456187895212,
      "grad_norm": 0.25954176321509254,
      "learning_rate": 9.864456609700726e-06,
      "loss": 0.6213,
      "step": 68
    },
    {
      "epoch": 0.12466124661246612,
      "grad_norm": 0.25912508931378314,
      "learning_rate": 9.857450191464337e-06,
      "loss": 0.6233,
      "step": 69
    },
    {
      "epoch": 0.12646793134598014,
      "grad_norm": 0.2698506612904357,
      "learning_rate": 9.85026983774049e-06,
      "loss": 0.6285,
      "step": 70
    },
    {
      "epoch": 0.12827461607949414,
      "grad_norm": 0.23787370000958719,
      "learning_rate": 9.842915805643156e-06,
      "loss": 0.5996,
      "step": 71
    },
    {
      "epoch": 0.13008130081300814,
      "grad_norm": 0.21973413712743903,
      "learning_rate": 9.835388358505383e-06,
      "loss": 0.6171,
      "step": 72
    },
    {
      "epoch": 0.13188798554652212,
      "grad_norm": 0.2542097049285353,
      "learning_rate": 9.827687765869859e-06,
      "loss": 0.6159,
      "step": 73
    },
    {
      "epoch": 0.13369467028003612,
      "grad_norm": 0.28885915694073955,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.6081,
      "step": 74
    },
    {
      "epoch": 0.13550135501355012,
      "grad_norm": 0.2732542861288398,
      "learning_rate": 9.811768253266401e-06,
      "loss": 0.606,
      "step": 75
    },
    {
      "epoch": 0.13730803974706413,
      "grad_norm": 0.2639987596902754,
      "learning_rate": 9.803549903344081e-06,
      "loss": 0.6016,
      "step": 76
    },
    {
      "epoch": 0.13911472448057813,
      "grad_norm": 0.2625743746400295,
      "learning_rate": 9.79515954799483e-06,
      "loss": 0.5963,
      "step": 77
    },
    {
      "epoch": 0.14092140921409213,
      "grad_norm": 0.2636110963817243,
      "learning_rate": 9.786597487660336e-06,
      "loss": 0.6082,
      "step": 78
    },
    {
      "epoch": 0.14272809394760613,
      "grad_norm": 0.2548650343445913,
      "learning_rate": 9.777864028930705e-06,
      "loss": 0.6173,
      "step": 79
    },
    {
      "epoch": 0.14453477868112014,
      "grad_norm": 0.24198639697590743,
      "learning_rate": 9.768959484533461e-06,
      "loss": 0.6262,
      "step": 80
    },
    {
      "epoch": 0.14634146341463414,
      "grad_norm": 0.23616390413838728,
      "learning_rate": 9.75988417332237e-06,
      "loss": 0.6087,
      "step": 81
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 0.24740792444302148,
      "learning_rate": 9.750638420266008e-06,
      "loss": 0.6023,
      "step": 82
    },
    {
      "epoch": 0.14995483288166214,
      "grad_norm": 0.2708089867070639,
      "learning_rate": 9.741222556436132e-06,
      "loss": 0.6133,
      "step": 83
    },
    {
      "epoch": 0.15176151761517614,
      "grad_norm": 0.23019955730854466,
      "learning_rate": 9.731636918995821e-06,
      "loss": 0.606,
      "step": 84
    },
    {
      "epoch": 0.15356820234869015,
      "grad_norm": 0.24984754231513484,
      "learning_rate": 9.721881851187406e-06,
      "loss": 0.6082,
      "step": 85
    },
    {
      "epoch": 0.15537488708220415,
      "grad_norm": 0.2514290283088365,
      "learning_rate": 9.711957702320176e-06,
      "loss": 0.6082,
      "step": 86
    },
    {
      "epoch": 0.15718157181571815,
      "grad_norm": 0.25050319285982814,
      "learning_rate": 9.701864827757868e-06,
      "loss": 0.6103,
      "step": 87
    },
    {
      "epoch": 0.15898825654923215,
      "grad_norm": 0.27127901136367444,
      "learning_rate": 9.691603588905956e-06,
      "loss": 0.6146,
      "step": 88
    },
    {
      "epoch": 0.16079494128274616,
      "grad_norm": 0.2538064222847297,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.6103,
      "step": 89
    },
    {
      "epoch": 0.16260162601626016,
      "grad_norm": 0.23895346723279925,
      "learning_rate": 9.670577494085945e-06,
      "loss": 0.6034,
      "step": 90
    },
    {
      "epoch": 0.16440831074977416,
      "grad_norm": 0.23015986092903712,
      "learning_rate": 9.659813391019867e-06,
      "loss": 0.6013,
      "step": 91
    },
    {
      "epoch": 0.16621499548328816,
      "grad_norm": 0.23826711464547476,
      "learning_rate": 9.648882429441258e-06,
      "loss": 0.6048,
      "step": 92
    },
    {
      "epoch": 0.16802168021680217,
      "grad_norm": 0.27338890022372714,
      "learning_rate": 9.637785000765789e-06,
      "loss": 0.6114,
      "step": 93
    },
    {
      "epoch": 0.16982836495031617,
      "grad_norm": 0.3426444715164819,
      "learning_rate": 9.626521502369984e-06,
      "loss": 0.6104,
      "step": 94
    },
    {
      "epoch": 0.17163504968383017,
      "grad_norm": 0.23367032192171455,
      "learning_rate": 9.615092337576987e-06,
      "loss": 0.6027,
      "step": 95
    },
    {
      "epoch": 0.17344173441734417,
      "grad_norm": 0.25432897179920155,
      "learning_rate": 9.603497915642122e-06,
      "loss": 0.6016,
      "step": 96
    },
    {
      "epoch": 0.17524841915085818,
      "grad_norm": 0.26462048665974897,
      "learning_rate": 9.591738651738235e-06,
      "loss": 0.6073,
      "step": 97
    },
    {
      "epoch": 0.17705510388437218,
      "grad_norm": 0.24672431475599113,
      "learning_rate": 9.579814966940833e-06,
      "loss": 0.6013,
      "step": 98
    },
    {
      "epoch": 0.17886178861788618,
      "grad_norm": 0.23213124494510456,
      "learning_rate": 9.567727288213005e-06,
      "loss": 0.6138,
      "step": 99
    },
    {
      "epoch": 0.18066847335140018,
      "grad_norm": 0.22763203934971926,
      "learning_rate": 9.55547604839013e-06,
      "loss": 0.5873,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 553,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 115440686923776.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}