{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 0,
  "global_step": 182,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005494505494505495,
      "grad_norm": 4.009056091308594,
      "learning_rate": 1e-05,
      "loss": 1.066,
      "step": 1
    },
    {
      "epoch": 0.01098901098901099,
      "grad_norm": 3.5878148078918457,
      "learning_rate": 9.999255120204248e-06,
      "loss": 1.2577,
      "step": 2
    },
    {
      "epoch": 0.016483516483516484,
      "grad_norm": 1.3414483070373535,
      "learning_rate": 9.997020702755353e-06,
      "loss": 0.9963,
      "step": 3
    },
    {
      "epoch": 0.02197802197802198,
      "grad_norm": 1.1858597993850708,
      "learning_rate": 9.993297413402282e-06,
      "loss": 1.0478,
      "step": 4
    },
    {
      "epoch": 0.027472527472527472,
      "grad_norm": 1.5414141416549683,
      "learning_rate": 9.98808636150624e-06,
      "loss": 0.9417,
      "step": 5
    },
    {
      "epoch": 0.03296703296703297,
      "grad_norm": 1.204790711402893,
      "learning_rate": 9.981389099710132e-06,
      "loss": 0.9123,
      "step": 6
    },
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 0.8749034404754639,
      "learning_rate": 9.973207623475964e-06,
      "loss": 0.8777,
      "step": 7
    },
    {
      "epoch": 0.04395604395604396,
      "grad_norm": 0.6556856036186218,
      "learning_rate": 9.96354437049027e-06,
      "loss": 0.8666,
      "step": 8
    },
    {
      "epoch": 0.04945054945054945,
      "grad_norm": 0.646369457244873,
      "learning_rate": 9.952402219937817e-06,
      "loss": 0.8594,
      "step": 9
    },
    {
      "epoch": 0.054945054945054944,
      "grad_norm": 0.48498281836509705,
      "learning_rate": 9.939784491643734e-06,
      "loss": 0.7966,
      "step": 10
    },
    {
      "epoch": 0.06043956043956044,
      "grad_norm": 0.6364641785621643,
      "learning_rate": 9.925694945084369e-06,
      "loss": 0.7221,
      "step": 11
    },
    {
      "epoch": 0.06593406593406594,
      "grad_norm": 0.49336859583854675,
      "learning_rate": 9.910137778267153e-06,
      "loss": 0.7939,
      "step": 12
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 0.601174533367157,
      "learning_rate": 9.893117626479778e-06,
      "loss": 0.9353,
      "step": 13
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 0.5266357064247131,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.8319,
      "step": 14
    },
    {
      "epoch": 0.08241758241758242,
      "grad_norm": 0.438719779253006,
      "learning_rate": 9.854709087130261e-06,
      "loss": 0.8717,
      "step": 15
    },
    {
      "epoch": 0.08791208791208792,
      "grad_norm": 0.5005918741226196,
      "learning_rate": 9.833332143466099e-06,
      "loss": 0.8633,
      "step": 16
    },
    {
      "epoch": 0.09340659340659341,
      "grad_norm": 0.43495941162109375,
      "learning_rate": 9.810515099218004e-06,
      "loss": 0.7982,
      "step": 17
    },
    {
      "epoch": 0.0989010989010989,
      "grad_norm": 0.4354497194290161,
      "learning_rate": 9.78626475276808e-06,
      "loss": 0.8322,
      "step": 18
    },
    {
      "epoch": 0.1043956043956044,
      "grad_norm": 0.5018942356109619,
      "learning_rate": 9.76058832955357e-06,
      "loss": 0.7082,
      "step": 19
    },
    {
      "epoch": 0.10989010989010989,
      "grad_norm": 0.5002418160438538,
      "learning_rate": 9.733493479914031e-06,
      "loss": 0.7597,
      "step": 20
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 0.4080904722213745,
      "learning_rate": 9.704988276811883e-06,
      "loss": 0.7219,
      "step": 21
    },
    {
      "epoch": 0.12087912087912088,
      "grad_norm": 0.39263999462127686,
      "learning_rate": 9.675081213427076e-06,
      "loss": 0.6776,
      "step": 22
    },
    {
      "epoch": 0.12637362637362637,
      "grad_norm": 0.5182728171348572,
      "learning_rate": 9.643781200626512e-06,
      "loss": 0.7313,
      "step": 23
    },
    {
      "epoch": 0.13186813186813187,
      "grad_norm": 0.47315528988838196,
      "learning_rate": 9.611097564309054e-06,
      "loss": 0.7784,
      "step": 24
    },
    {
      "epoch": 0.13736263736263737,
      "grad_norm": 0.48023274540901184,
      "learning_rate": 9.577040042626832e-06,
      "loss": 0.8805,
      "step": 25
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 0.506425142288208,
      "learning_rate": 9.54161878308377e-06,
      "loss": 0.6775,
      "step": 26
    },
    {
      "epoch": 0.14835164835164835,
      "grad_norm": 0.5148679614067078,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.798,
      "step": 27
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.4537680149078369,
      "learning_rate": 9.466727668927817e-06,
      "loss": 0.8302,
      "step": 28
    },
    {
      "epoch": 0.15934065934065933,
      "grad_norm": 0.46178528666496277,
      "learning_rate": 9.427280128266049e-06,
      "loss": 0.7257,
      "step": 29
    },
    {
      "epoch": 0.16483516483516483,
      "grad_norm": 0.5894891023635864,
      "learning_rate": 9.38651347099721e-06,
      "loss": 0.7565,
      "step": 30
    },
    {
      "epoch": 0.17032967032967034,
      "grad_norm": 0.4654892086982727,
      "learning_rate": 9.344439843625034e-06,
      "loss": 0.6912,
      "step": 31
    },
    {
      "epoch": 0.17582417582417584,
      "grad_norm": 0.5982049107551575,
      "learning_rate": 9.301071782067504e-06,
      "loss": 0.8083,
      "step": 32
    },
    {
      "epoch": 0.1813186813186813,
      "grad_norm": 1.3997434377670288,
      "learning_rate": 9.256422207921757e-06,
      "loss": 0.7089,
      "step": 33
    },
    {
      "epoch": 0.18681318681318682,
      "grad_norm": 0.7488532066345215,
      "learning_rate": 9.21050442461406e-06,
      "loss": 0.7641,
      "step": 34
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 1.0722405910491943,
      "learning_rate": 9.163332113436031e-06,
      "loss": 0.7228,
      "step": 35
    },
    {
      "epoch": 0.1978021978021978,
      "grad_norm": 0.390733927488327,
      "learning_rate": 9.114919329468283e-06,
      "loss": 0.6956,
      "step": 36
    },
    {
      "epoch": 0.2032967032967033,
      "grad_norm": 0.5619770288467407,
      "learning_rate": 9.065280497392663e-06,
      "loss": 0.7027,
      "step": 37
    },
    {
      "epoch": 0.2087912087912088,
      "grad_norm": 0.5357697606086731,
      "learning_rate": 9.014430407194413e-06,
      "loss": 0.7585,
      "step": 38
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 0.3587185740470886,
      "learning_rate": 8.962384209755453e-06,
      "loss": 0.7046,
      "step": 39
    },
    {
      "epoch": 0.21978021978021978,
      "grad_norm": 0.3823581039905548,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.6748,
      "step": 40
    },
    {
      "epoch": 0.22527472527472528,
      "grad_norm": 0.40263715386390686,
      "learning_rate": 8.854765873974898e-06,
      "loss": 0.7055,
      "step": 41
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 0.38500145077705383,
      "learning_rate": 8.799225800722895e-06,
      "loss": 0.7163,
      "step": 42
    },
    {
      "epoch": 0.23626373626373626,
      "grad_norm": 0.3382181227207184,
      "learning_rate": 8.742553740855507e-06,
      "loss": 0.7276,
      "step": 43
    },
    {
      "epoch": 0.24175824175824176,
      "grad_norm": 0.3263402283191681,
      "learning_rate": 8.684766579921684e-06,
      "loss": 0.652,
      "step": 44
    },
    {
      "epoch": 0.24725274725274726,
      "grad_norm": 0.36602887511253357,
      "learning_rate": 8.625881535716883e-06,
      "loss": 0.659,
      "step": 45
    },
    {
      "epoch": 0.25274725274725274,
      "grad_norm": 0.49155181646347046,
      "learning_rate": 8.565916153152982e-06,
      "loss": 0.6956,
      "step": 46
    },
    {
      "epoch": 0.25824175824175827,
      "grad_norm": 0.3172796070575714,
      "learning_rate": 8.504888299030748e-06,
      "loss": 0.7043,
      "step": 47
    },
    {
      "epoch": 0.26373626373626374,
      "grad_norm": 0.3485009968280792,
      "learning_rate": 8.442816156716386e-06,
      "loss": 0.659,
      "step": 48
    },
    {
      "epoch": 0.2692307692307692,
      "grad_norm": 0.3517111539840698,
      "learning_rate": 8.379718220723772e-06,
      "loss": 0.6044,
      "step": 49
    },
    {
      "epoch": 0.27472527472527475,
      "grad_norm": 0.648313581943512,
      "learning_rate": 8.315613291203977e-06,
      "loss": 0.675,
      "step": 50
    },
    {
      "epoch": 0.2802197802197802,
      "grad_norm": 0.44727128744125366,
      "learning_rate": 8.250520468343722e-06,
      "loss": 0.6972,
      "step": 51
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 0.32652580738067627,
      "learning_rate": 8.184459146674447e-06,
      "loss": 0.6638,
      "step": 52
    },
    {
      "epoch": 0.29120879120879123,
      "grad_norm": 0.3363085091114044,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.6792,
      "step": 53
    },
    {
      "epoch": 0.2967032967032967,
      "grad_norm": 0.3093494772911072,
      "learning_rate": 8.049510022000365e-06,
      "loss": 0.6024,
      "step": 54
    },
    {
      "epoch": 0.3021978021978022,
      "grad_norm": 0.2993621230125427,
      "learning_rate": 7.980662427346127e-06,
      "loss": 0.6222,
      "step": 55
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 0.3341246247291565,
      "learning_rate": 7.910926738603855e-06,
      "loss": 0.5864,
      "step": 56
    },
    {
      "epoch": 0.3131868131868132,
      "grad_norm": 0.3726695775985718,
      "learning_rate": 7.84032373365578e-06,
      "loss": 0.692,
      "step": 57
    },
    {
      "epoch": 0.31868131868131866,
      "grad_norm": 0.32372209429740906,
      "learning_rate": 7.768874448802665e-06,
      "loss": 0.6933,
      "step": 58
    },
    {
      "epoch": 0.3241758241758242,
      "grad_norm": 0.3319871723651886,
      "learning_rate": 7.696600172495997e-06,
      "loss": 0.6573,
      "step": 59
    },
    {
      "epoch": 0.32967032967032966,
      "grad_norm": 0.3265295922756195,
      "learning_rate": 7.62352243899504e-06,
      "loss": 0.7431,
      "step": 60
    },
    {
      "epoch": 0.33516483516483514,
      "grad_norm": 0.39685407280921936,
      "learning_rate": 7.5496630219506805e-06,
      "loss": 0.6352,
      "step": 61
    },
    {
      "epoch": 0.34065934065934067,
      "grad_norm": 0.41012343764305115,
      "learning_rate": 7.475043927917908e-06,
      "loss": 0.7049,
      "step": 62
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 0.3942950367927551,
      "learning_rate": 7.399687389798933e-06,
      "loss": 0.6382,
      "step": 63
    },
    {
      "epoch": 0.3516483516483517,
      "grad_norm": 0.33018234372138977,
      "learning_rate": 7.323615860218844e-06,
      "loss": 0.5913,
      "step": 64
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.3370421528816223,
      "learning_rate": 7.246852004835807e-06,
      "loss": 0.6589,
      "step": 65
    },
    {
      "epoch": 0.3626373626373626,
      "grad_norm": 0.34546154737472534,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.6575,
      "step": 66
    },
    {
      "epoch": 0.36813186813186816,
      "grad_norm": 0.37000104784965515,
      "learning_rate": 7.091339003877826e-06,
      "loss": 0.6544,
      "step": 67
    },
    {
      "epoch": 0.37362637362637363,
      "grad_norm": 0.33996450901031494,
      "learning_rate": 7.012636193699838e-06,
      "loss": 0.7012,
      "step": 68
    },
    {
      "epoch": 0.3791208791208791,
      "grad_norm": 0.33096781373023987,
      "learning_rate": 6.933333714707094e-06,
      "loss": 0.6396,
      "step": 69
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.324194997549057,
      "learning_rate": 6.8534551952253395e-06,
      "loss": 0.6584,
      "step": 70
    },
    {
      "epoch": 0.3901098901098901,
      "grad_norm": 0.333071231842041,
      "learning_rate": 6.773024435212678e-06,
      "loss": 0.6162,
      "step": 71
    },
    {
      "epoch": 0.3956043956043956,
      "grad_norm": 0.3411756455898285,
      "learning_rate": 6.692065399168352e-06,
      "loss": 0.6693,
      "step": 72
    },
    {
      "epoch": 0.4010989010989011,
      "grad_norm": 0.31278958916664124,
      "learning_rate": 6.6106022089924535e-06,
      "loss": 0.6217,
      "step": 73
    },
    {
      "epoch": 0.4065934065934066,
      "grad_norm": 0.33422794938087463,
      "learning_rate": 6.5286591367987655e-06,
      "loss": 0.6016,
      "step": 74
    },
    {
      "epoch": 0.41208791208791207,
      "grad_norm": 0.3279259502887726,
      "learning_rate": 6.4462605976828395e-06,
      "loss": 0.6261,
      "step": 75
    },
    {
      "epoch": 0.4175824175824176,
      "grad_norm": 0.35578882694244385,
      "learning_rate": 6.363431142447469e-06,
      "loss": 0.6231,
      "step": 76
    },
    {
      "epoch": 0.4230769230769231,
      "grad_norm": 0.3535009026527405,
      "learning_rate": 6.280195450287736e-06,
      "loss": 0.6041,
      "step": 77
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 0.3417198061943054,
      "learning_rate": 6.1965783214377895e-06,
      "loss": 0.6653,
      "step": 78
    },
    {
      "epoch": 0.4340659340659341,
      "grad_norm": 0.34187737107276917,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.6374,
      "step": 79
    },
    {
      "epoch": 0.43956043956043955,
      "grad_norm": 0.3478895425796509,
      "learning_rate": 6.028299515429683e-06,
      "loss": 0.5961,
      "step": 80
    },
    {
      "epoch": 0.44505494505494503,
      "grad_norm": 0.33532464504241943,
      "learning_rate": 5.943687977264584e-06,
      "loss": 0.6086,
      "step": 81
    },
    {
      "epoch": 0.45054945054945056,
      "grad_norm": 0.3318541646003723,
      "learning_rate": 5.858795265456382e-06,
      "loss": 0.6376,
      "step": 82
    },
    {
      "epoch": 0.45604395604395603,
      "grad_norm": 0.31934893131256104,
      "learning_rate": 5.773646673951406e-06,
      "loss": 0.5712,
      "step": 83
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.3479771018028259,
      "learning_rate": 5.688267572935843e-06,
      "loss": 0.6943,
      "step": 84
    },
    {
      "epoch": 0.46703296703296704,
      "grad_norm": 0.31551942229270935,
      "learning_rate": 5.6026834012766155e-06,
      "loss": 0.6072,
      "step": 85
    },
    {
      "epoch": 0.4725274725274725,
      "grad_norm": 0.3335334360599518,
      "learning_rate": 5.51691965894185e-06,
      "loss": 0.6284,
      "step": 86
    },
    {
      "epoch": 0.47802197802197804,
      "grad_norm": 0.35808464884757996,
      "learning_rate": 5.4310018994030974e-06,
      "loss": 0.6354,
      "step": 87
    },
    {
      "epoch": 0.4835164835164835,
      "grad_norm": 0.309426486492157,
      "learning_rate": 5.3449557220216245e-06,
      "loss": 0.6109,
      "step": 88
    },
    {
      "epoch": 0.489010989010989,
      "grad_norm": 0.30853211879730225,
      "learning_rate": 5.258806764421048e-06,
      "loss": 0.6391,
      "step": 89
    },
    {
      "epoch": 0.4945054945054945,
      "grad_norm": 0.3376154601573944,
      "learning_rate": 5.172580694848541e-06,
      "loss": 0.6052,
      "step": 90
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.3383801579475403,
      "learning_rate": 5.0863032045269435e-06,
      "loss": 0.5901,
      "step": 91
    },
    {
      "epoch": 0.5054945054945055,
      "grad_norm": 0.3549947440624237,
      "learning_rate": 5e-06,
      "loss": 0.6071,
      "step": 92
    },
    {
      "epoch": 0.510989010989011,
      "grad_norm": 0.32599008083343506,
      "learning_rate": 4.913696795473058e-06,
      "loss": 0.716,
      "step": 93
    },
    {
      "epoch": 0.5164835164835165,
      "grad_norm": 0.35090532898902893,
      "learning_rate": 4.827419305151461e-06,
      "loss": 0.6547,
      "step": 94
    },
    {
      "epoch": 0.521978021978022,
      "grad_norm": 0.3721665143966675,
      "learning_rate": 4.741193235578953e-06,
      "loss": 0.6321,
      "step": 95
    },
    {
      "epoch": 0.5274725274725275,
      "grad_norm": 0.34609922766685486,
      "learning_rate": 4.6550442779783755e-06,
      "loss": 0.6127,
      "step": 96
    },
    {
      "epoch": 0.532967032967033,
      "grad_norm": 0.36226555705070496,
      "learning_rate": 4.568998100596903e-06,
      "loss": 0.6136,
      "step": 97
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.3137187659740448,
      "learning_rate": 4.4830803410581506e-06,
      "loss": 0.5762,
      "step": 98
    },
    {
      "epoch": 0.5439560439560439,
      "grad_norm": 0.32388895750045776,
      "learning_rate": 4.397316598723385e-06,
      "loss": 0.6225,
      "step": 99
    },
    {
      "epoch": 0.5494505494505495,
      "grad_norm": 0.36423254013061523,
      "learning_rate": 4.31173242706416e-06,
      "loss": 0.6298,
      "step": 100
    },
    {
      "epoch": 0.554945054945055,
      "grad_norm": 0.34424489736557007,
      "learning_rate": 4.226353326048594e-06,
      "loss": 0.6297,
      "step": 101
    },
    {
      "epoch": 0.5604395604395604,
      "grad_norm": 0.34294191002845764,
      "learning_rate": 4.14120473454362e-06,
      "loss": 0.5517,
      "step": 102
    },
    {
      "epoch": 0.5659340659340659,
      "grad_norm": 0.3467578887939453,
      "learning_rate": 4.056312022735417e-06,
      "loss": 0.6647,
      "step": 103
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.37314093112945557,
      "learning_rate": 3.9717004845703175e-06,
      "loss": 0.5991,
      "step": 104
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.34213563799858093,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.5774,
      "step": 105
    },
    {
      "epoch": 0.5824175824175825,
      "grad_norm": 0.33560436964035034,
      "learning_rate": 3.803421678562213e-06,
      "loss": 0.6627,
      "step": 106
    },
    {
      "epoch": 0.5879120879120879,
      "grad_norm": 0.3514181077480316,
      "learning_rate": 3.7198045497122647e-06,
      "loss": 0.5307,
      "step": 107
    },
    {
      "epoch": 0.5934065934065934,
      "grad_norm": 0.3278910517692566,
      "learning_rate": 3.6365688575525315e-06,
      "loss": 0.5058,
      "step": 108
    },
    {
      "epoch": 0.5989010989010989,
      "grad_norm": 0.33856672048568726,
      "learning_rate": 3.553739402317162e-06,
      "loss": 0.6254,
      "step": 109
    },
    {
      "epoch": 0.6043956043956044,
      "grad_norm": 0.3424539864063263,
      "learning_rate": 3.471340863201237e-06,
      "loss": 0.5833,
      "step": 110
    },
    {
      "epoch": 0.6098901098901099,
      "grad_norm": 0.36081546545028687,
      "learning_rate": 3.389397791007548e-06,
      "loss": 0.5756,
      "step": 111
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.41666510701179504,
      "learning_rate": 3.307934600831648e-06,
      "loss": 0.6555,
      "step": 112
    },
    {
      "epoch": 0.6208791208791209,
      "grad_norm": 0.3145431876182556,
      "learning_rate": 3.226975564787322e-06,
      "loss": 0.5479,
      "step": 113
    },
    {
      "epoch": 0.6263736263736264,
      "grad_norm": 0.29985201358795166,
      "learning_rate": 3.1465448047746626e-06,
      "loss": 0.5422,
      "step": 114
    },
    {
      "epoch": 0.6318681318681318,
      "grad_norm": 0.34139716625213623,
      "learning_rate": 3.0666662852929063e-06,
      "loss": 0.5531,
      "step": 115
    },
    {
      "epoch": 0.6373626373626373,
      "grad_norm": 0.38213226199150085,
      "learning_rate": 2.9873638063001633e-06,
      "loss": 0.6184,
      "step": 116
    },
    {
      "epoch": 0.6428571428571429,
      "grad_norm": 0.3261444568634033,
      "learning_rate": 2.9086609961221758e-06,
      "loss": 0.578,
      "step": 117
    },
    {
      "epoch": 0.6483516483516484,
      "grad_norm": 0.3387126922607422,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.5024,
      "step": 118
    },
    {
      "epoch": 0.6538461538461539,
      "grad_norm": 0.3375018239021301,
      "learning_rate": 2.7531479951641928e-06,
      "loss": 0.5994,
      "step": 119
    },
    {
      "epoch": 0.6593406593406593,
      "grad_norm": 0.3370437026023865,
      "learning_rate": 2.6763841397811576e-06,
      "loss": 0.5473,
      "step": 120
    },
    {
      "epoch": 0.6648351648351648,
      "grad_norm": 0.3111221194267273,
      "learning_rate": 2.6003126102010696e-06,
      "loss": 0.5147,
      "step": 121
    },
    {
      "epoch": 0.6703296703296703,
      "grad_norm": 0.3582828640937805,
      "learning_rate": 2.524956072082093e-06,
      "loss": 0.541,
      "step": 122
    },
    {
      "epoch": 0.6758241758241759,
      "grad_norm": 0.35231706500053406,
      "learning_rate": 2.450336978049322e-06,
      "loss": 0.6368,
      "step": 123
    },
    {
      "epoch": 0.6813186813186813,
      "grad_norm": 0.36998987197875977,
      "learning_rate": 2.37647756100496e-06,
      "loss": 0.5267,
      "step": 124
    },
    {
      "epoch": 0.6868131868131868,
      "grad_norm": 0.2999013066291809,
      "learning_rate": 2.3033998275040047e-06,
      "loss": 0.5112,
      "step": 125
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.3361146152019501,
      "learning_rate": 2.2311255511973347e-06,
      "loss": 0.6337,
      "step": 126
    },
    {
      "epoch": 0.6978021978021978,
      "grad_norm": 0.3418031334877014,
      "learning_rate": 2.159676266344222e-06,
      "loss": 0.6365,
      "step": 127
    },
    {
      "epoch": 0.7032967032967034,
      "grad_norm": 0.3264194130897522,
      "learning_rate": 2.089073261396148e-06,
      "loss": 0.5767,
      "step": 128
    },
    {
      "epoch": 0.7087912087912088,
      "grad_norm": 0.3361995220184326,
      "learning_rate": 2.0193375726538737e-06,
      "loss": 0.5865,
      "step": 129
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.3277769982814789,
      "learning_rate": 1.9504899779996354e-06,
      "loss": 0.5779,
      "step": 130
    },
    {
      "epoch": 0.7197802197802198,
      "grad_norm": 0.38124844431877136,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.5856,
      "step": 131
    },
    {
      "epoch": 0.7252747252747253,
      "grad_norm": 0.31078073382377625,
      "learning_rate": 1.8155408533255553e-06,
      "loss": 0.5712,
      "step": 132
    },
    {
      "epoch": 0.7307692307692307,
      "grad_norm": 0.3475596010684967,
      "learning_rate": 1.7494795316562791e-06,
      "loss": 0.6173,
      "step": 133
    },
    {
      "epoch": 0.7362637362637363,
      "grad_norm": 0.3320766091346741,
      "learning_rate": 1.6843867087960252e-06,
      "loss": 0.5245,
      "step": 134
    },
    {
      "epoch": 0.7417582417582418,
      "grad_norm": 0.32898691296577454,
      "learning_rate": 1.6202817792762283e-06,
      "loss": 0.5614,
      "step": 135
    },
    {
      "epoch": 0.7472527472527473,
      "grad_norm": 0.2987188398838043,
      "learning_rate": 1.557183843283614e-06,
      "loss": 0.5352,
      "step": 136
    },
    {
      "epoch": 0.7527472527472527,
      "grad_norm": 0.3136436939239502,
      "learning_rate": 1.4951117009692528e-06,
      "loss": 0.5169,
      "step": 137
    },
    {
      "epoch": 0.7582417582417582,
      "grad_norm": 0.3560352325439453,
      "learning_rate": 1.4340838468470198e-06,
      "loss": 0.5796,
      "step": 138
    },
    {
      "epoch": 0.7637362637362637,
      "grad_norm": 0.37465450167655945,
      "learning_rate": 1.374118464283119e-06,
      "loss": 0.6766,
      "step": 139
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.3289940059185028,
      "learning_rate": 1.3152334200783167e-06,
      "loss": 0.547,
      "step": 140
    },
    {
      "epoch": 0.7747252747252747,
      "grad_norm": 0.32682889699935913,
      "learning_rate": 1.257446259144494e-06,
      "loss": 0.5594,
      "step": 141
    },
    {
      "epoch": 0.7802197802197802,
      "grad_norm": 0.32840487360954285,
      "learning_rate": 1.2007741992771065e-06,
      "loss": 0.5352,
      "step": 142
    },
    {
      "epoch": 0.7857142857142857,
      "grad_norm": 0.33606359362602234,
      "learning_rate": 1.145234126025102e-06,
      "loss": 0.5844,
      "step": 143
    },
    {
      "epoch": 0.7912087912087912,
      "grad_norm": 0.3465987741947174,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.5795,
      "step": 144
    },
    {
      "epoch": 0.7967032967032966,
      "grad_norm": 0.35972467064857483,
      "learning_rate": 1.037615790244549e-06,
      "loss": 0.5577,
      "step": 145
    },
    {
      "epoch": 0.8021978021978022,
      "grad_norm": 0.3265731632709503,
      "learning_rate": 9.85569592805588e-07,
      "loss": 0.5103,
      "step": 146
    },
    {
      "epoch": 0.8076923076923077,
      "grad_norm": 0.34925973415374756,
      "learning_rate": 9.347195026073369e-07,
      "loss": 0.627,
      "step": 147
    },
    {
      "epoch": 0.8131868131868132,
      "grad_norm": 0.31874677538871765,
      "learning_rate": 8.850806705317183e-07,
      "loss": 0.5955,
      "step": 148
    },
    {
      "epoch": 0.8186813186813187,
      "grad_norm": 0.3261333703994751,
      "learning_rate": 8.366678865639688e-07,
      "loss": 0.5587,
      "step": 149
    },
    {
      "epoch": 0.8241758241758241,
      "grad_norm": 0.3123181164264679,
      "learning_rate": 7.894955753859412e-07,
      "loss": 0.5509,
      "step": 150
    },
    {
      "epoch": 0.8296703296703297,
      "grad_norm": 0.3487018346786499,
      "learning_rate": 7.435777920782444e-07,
      "loss": 0.5676,
      "step": 151
    },
    {
      "epoch": 0.8351648351648352,
      "grad_norm": 0.38510191440582275,
      "learning_rate": 6.989282179324963e-07,
      "loss": 0.5844,
      "step": 152
    },
    {
      "epoch": 0.8406593406593407,
      "grad_norm": 0.32092976570129395,
      "learning_rate": 6.555601563749675e-07,
      "loss": 0.5582,
      "step": 153
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.335268497467041,
      "learning_rate": 6.134865290027903e-07,
      "loss": 0.6081,
      "step": 154
    },
    {
      "epoch": 0.8516483516483516,
      "grad_norm": 0.3320220708847046,
      "learning_rate": 5.727198717339511e-07,
      "loss": 0.5944,
      "step": 155
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.3156368136405945,
      "learning_rate": 5.332723310721855e-07,
      "loss": 0.5586,
      "step": 156
    },
    {
      "epoch": 0.8626373626373627,
      "grad_norm": 0.33842095732688904,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.5046,
      "step": 157
    },
    {
      "epoch": 0.8681318681318682,
      "grad_norm": 0.3008866310119629,
      "learning_rate": 4.5838121691622995e-07,
      "loss": 0.47,
      "step": 158
    },
    {
      "epoch": 0.8736263736263736,
      "grad_norm": 0.31621789932250977,
      "learning_rate": 4.2295995737316854e-07,
      "loss": 0.5633,
      "step": 159
    },
    {
      "epoch": 0.8791208791208791,
      "grad_norm": 0.31777408719062805,
      "learning_rate": 3.8890243569094874e-07,
      "loss": 0.5113,
      "step": 160
    },
    {
      "epoch": 0.8846153846153846,
      "grad_norm": 0.3305305540561676,
      "learning_rate": 3.5621879937348836e-07,
      "loss": 0.5524,
      "step": 161
    },
    {
      "epoch": 0.8901098901098901,
      "grad_norm": 0.3152499496936798,
      "learning_rate": 3.2491878657292643e-07,
      "loss": 0.5464,
      "step": 162
    },
    {
      "epoch": 0.8956043956043956,
      "grad_norm": 0.3355554938316345,
      "learning_rate": 2.9501172318811834e-07,
      "loss": 0.5729,
      "step": 163
    },
    {
      "epoch": 0.9010989010989011,
      "grad_norm": 0.33386698365211487,
      "learning_rate": 2.6650652008597067e-07,
      "loss": 0.5825,
      "step": 164
    },
    {
      "epoch": 0.9065934065934066,
      "grad_norm": 0.35399433970451355,
      "learning_rate": 2.394116704464294e-07,
      "loss": 0.5816,
      "step": 165
    },
    {
      "epoch": 0.9120879120879121,
      "grad_norm": 0.34380120038986206,
      "learning_rate": 2.137352472319215e-07,
      "loss": 0.5894,
      "step": 166
    },
    {
      "epoch": 0.9175824175824175,
      "grad_norm": 0.32450902462005615,
      "learning_rate": 1.8948490078199767e-07,
      "loss": 0.5778,
      "step": 167
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.293279230594635,
      "learning_rate": 1.666678565339025e-07,
      "loss": 0.4685,
      "step": 168
    },
    {
      "epoch": 0.9285714285714286,
      "grad_norm": 0.3483200967311859,
      "learning_rate": 1.4529091286973994e-07,
      "loss": 0.6355,
      "step": 169
    },
    {
      "epoch": 0.9340659340659341,
      "grad_norm": 0.3036319613456726,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.5692,
      "step": 170
    },
    {
      "epoch": 0.9395604395604396,
      "grad_norm": 0.3103819787502289,
      "learning_rate": 1.0688237352022346e-07,
      "loss": 0.5156,
      "step": 171
    },
    {
      "epoch": 0.945054945054945,
      "grad_norm": 0.30952194333076477,
      "learning_rate": 8.986222173284876e-08,
      "loss": 0.511,
      "step": 172
    },
    {
      "epoch": 0.9505494505494505,
      "grad_norm": 0.31444427371025085,
      "learning_rate": 7.430505491563101e-08,
      "loss": 0.4911,
      "step": 173
    },
    {
      "epoch": 0.9560439560439561,
      "grad_norm": 0.4444173574447632,
      "learning_rate": 6.021550835626777e-08,
      "loss": 0.5957,
      "step": 174
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.3321171700954437,
      "learning_rate": 4.759778006218407e-08,
      "loss": 0.6213,
      "step": 175
    },
    {
      "epoch": 0.967032967032967,
      "grad_norm": 0.2999350428581238,
      "learning_rate": 3.645562950973014e-08,
      "loss": 0.5252,
      "step": 176
    },
    {
      "epoch": 0.9725274725274725,
      "grad_norm": 0.33736324310302734,
      "learning_rate": 2.6792376524036878e-08,
      "loss": 0.6088,
      "step": 177
    },
    {
      "epoch": 0.978021978021978,
      "grad_norm": 0.33570563793182373,
      "learning_rate": 1.8610900289867673e-08,
      "loss": 0.6039,
      "step": 178
    },
    {
      "epoch": 0.9835164835164835,
      "grad_norm": 0.34118375182151794,
      "learning_rate": 1.1913638493762369e-08,
      "loss": 0.5748,
      "step": 179
    },
    {
      "epoch": 0.989010989010989,
      "grad_norm": 0.31392279267311096,
      "learning_rate": 6.702586597719385e-09,
      "loss": 0.5903,
      "step": 180
    },
    {
      "epoch": 0.9945054945054945,
      "grad_norm": 0.357446551322937,
      "learning_rate": 2.9792972446479605e-09,
      "loss": 0.5262,
      "step": 181
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.31577834486961365,
      "learning_rate": 7.448797957526621e-10,
      "loss": 0.4734,
      "step": 182
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 182,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.340409498200965e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}