{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "eval_steps": 500,
  "global_step": 116,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03508771929824561,
      "grad_norm": 2.0671271678531755,
      "learning_rate": 0.0,
      "loss": 0.866,
      "step": 1
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 2.2900714872410477,
      "learning_rate": 7.142857142857143e-06,
      "loss": 1.043,
      "step": 2
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 1.861548862699238,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 0.9408,
      "step": 3
    },
    {
      "epoch": 0.14035087719298245,
      "grad_norm": 1.5918887111051039,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 0.9754,
      "step": 4
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 2.461157666875476,
      "learning_rate": 2.857142857142857e-05,
      "loss": 0.9909,
      "step": 5
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 3.269351348810357,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.9765,
      "step": 6
    },
    {
      "epoch": 0.24561403508771928,
      "grad_norm": 2.891039729725391,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 0.976,
      "step": 7
    },
    {
      "epoch": 0.2807017543859649,
      "grad_norm": 1.7833187830275166,
      "learning_rate": 5e-05,
      "loss": 0.985,
      "step": 8
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 1.6508430392003424,
      "learning_rate": 4.9993025930300686e-05,
      "loss": 0.9983,
      "step": 9
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 1.3325514749958667,
      "learning_rate": 4.99721076122146e-05,
      "loss": 0.9611,
      "step": 10
    },
    {
      "epoch": 0.38596491228070173,
      "grad_norm": 1.1508459362776429,
      "learning_rate": 4.9937256716606394e-05,
      "loss": 0.8505,
      "step": 11
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 0.8112441764899212,
      "learning_rate": 4.9888492687682096e-05,
      "loss": 0.8723,
      "step": 12
    },
    {
      "epoch": 0.45614035087719296,
      "grad_norm": 0.8092302145893412,
      "learning_rate": 4.982584273214061e-05,
      "loss": 0.9396,
      "step": 13
    },
    {
      "epoch": 0.49122807017543857,
      "grad_norm": 0.7597090899690478,
      "learning_rate": 4.9749341803994465e-05,
      "loss": 0.8441,
      "step": 14
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.6679585780372033,
      "learning_rate": 4.965903258506806e-05,
      "loss": 0.8723,
      "step": 15
    },
    {
      "epoch": 0.5614035087719298,
      "grad_norm": 0.5980287018483266,
      "learning_rate": 4.955496546118439e-05,
      "loss": 0.8188,
      "step": 16
    },
    {
      "epoch": 0.5964912280701754,
      "grad_norm": 0.7845483827995335,
      "learning_rate": 4.9437198494053464e-05,
      "loss": 0.9303,
      "step": 17
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.5932466105687543,
      "learning_rate": 4.9305797388878264e-05,
      "loss": 0.8304,
      "step": 18
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.5537040595976515,
      "learning_rate": 4.916083545769607e-05,
      "loss": 0.8943,
      "step": 19
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 0.5092981470635746,
      "learning_rate": 4.9002393578475816e-05,
      "loss": 0.8906,
      "step": 20
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 0.5487294714158888,
      "learning_rate": 4.883056014999423e-05,
      "loss": 0.8584,
      "step": 21
    },
    {
      "epoch": 0.7719298245614035,
      "grad_norm": 0.4922358015626094,
      "learning_rate": 4.864543104251587e-05,
      "loss": 0.8129,
      "step": 22
    },
    {
      "epoch": 0.8070175438596491,
      "grad_norm": 0.5397899665966813,
      "learning_rate": 4.8447109544304636e-05,
      "loss": 0.8779,
      "step": 23
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.5043199211637701,
      "learning_rate": 4.823570630399665e-05,
      "loss": 0.8482,
      "step": 24
    },
    {
      "epoch": 0.8771929824561403,
      "grad_norm": 0.4889597930942473,
      "learning_rate": 4.8011339268866505e-05,
      "loss": 0.8361,
      "step": 25
    },
    {
      "epoch": 0.9122807017543859,
      "grad_norm": 0.4769108041878493,
      "learning_rate": 4.7774133619021514e-05,
      "loss": 0.904,
      "step": 26
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 0.41632944677923917,
      "learning_rate": 4.752422169756048e-05,
      "loss": 0.8032,
      "step": 27
    },
    {
      "epoch": 0.9824561403508771,
      "grad_norm": 0.5107558694810208,
      "learning_rate": 4.726174293673612e-05,
      "loss": 0.8312,
      "step": 28
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.5107558694810208,
      "learning_rate": 4.698684378016222e-05,
      "loss": 0.6911,
      "step": 29
    },
    {
      "epoch": 1.0350877192982457,
      "grad_norm": 0.9504413832780126,
      "learning_rate": 4.669967760110908e-05,
      "loss": 0.7434,
      "step": 30
    },
    {
      "epoch": 1.0701754385964912,
      "grad_norm": 0.5169442096859842,
      "learning_rate": 4.6400404616932505e-05,
      "loss": 0.7058,
      "step": 31
    },
    {
      "epoch": 1.1052631578947367,
      "grad_norm": 0.5229168796306622,
      "learning_rate": 4.608919179968457e-05,
      "loss": 0.7504,
      "step": 32
    },
    {
      "epoch": 1.1403508771929824,
      "grad_norm": 0.7632061755291258,
      "learning_rate": 4.576621278295558e-05,
      "loss": 0.7216,
      "step": 33
    },
    {
      "epoch": 1.1754385964912282,
      "grad_norm": 0.6677063053297304,
      "learning_rate": 4.5431647764999455e-05,
      "loss": 0.6885,
      "step": 34
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 0.5388255492178547,
      "learning_rate": 4.5085683408196535e-05,
      "loss": 0.7347,
      "step": 35
    },
    {
      "epoch": 1.2456140350877192,
      "grad_norm": 0.6102262771882692,
      "learning_rate": 4.4728512734909844e-05,
      "loss": 0.7483,
      "step": 36
    },
    {
      "epoch": 1.280701754385965,
      "grad_norm": 0.9784625861220957,
      "learning_rate": 4.436033501979299e-05,
      "loss": 0.7104,
      "step": 37
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 0.5182340717871661,
      "learning_rate": 4.398135567860972e-05,
      "loss": 0.6926,
      "step": 38
    },
    {
      "epoch": 1.3508771929824561,
      "grad_norm": 0.5241406807392717,
      "learning_rate": 4.3591786153627247e-05,
      "loss": 0.6423,
      "step": 39
    },
    {
      "epoch": 1.3859649122807016,
      "grad_norm": 0.5337062588294988,
      "learning_rate": 4.319184379564716e-05,
      "loss": 0.7449,
      "step": 40
    },
    {
      "epoch": 1.4210526315789473,
      "grad_norm": 0.58502320062393,
      "learning_rate": 4.2781751742739885e-05,
      "loss": 0.6863,
      "step": 41
    },
    {
      "epoch": 1.456140350877193,
      "grad_norm": 0.4437314579585024,
      "learning_rate": 4.2361738795750214e-05,
      "loss": 0.6421,
      "step": 42
    },
    {
      "epoch": 1.4912280701754386,
      "grad_norm": 0.4720129405227882,
      "learning_rate": 4.193203929064353e-05,
      "loss": 0.6387,
      "step": 43
    },
    {
      "epoch": 1.526315789473684,
      "grad_norm": 0.49099471978636483,
      "learning_rate": 4.1492892967763686e-05,
      "loss": 0.7621,
      "step": 44
    },
    {
      "epoch": 1.5614035087719298,
      "grad_norm": 0.4870002337399591,
      "learning_rate": 4.1044544838075794e-05,
      "loss": 0.7271,
      "step": 45
    },
    {
      "epoch": 1.5964912280701755,
      "grad_norm": 0.4300326366249684,
      "learning_rate": 4.058724504646834e-05,
      "loss": 0.6553,
      "step": 46
    },
    {
      "epoch": 1.631578947368421,
      "grad_norm": 0.4648072620566617,
      "learning_rate": 4.012124873219094e-05,
      "loss": 0.6585,
      "step": 47
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.4704588320844793,
      "learning_rate": 3.964681588650562e-05,
      "loss": 0.7066,
      "step": 48
    },
    {
      "epoch": 1.7017543859649122,
      "grad_norm": 0.4227060942964382,
      "learning_rate": 3.916421120763106e-05,
      "loss": 0.7298,
      "step": 49
    },
    {
      "epoch": 1.736842105263158,
      "grad_norm": 0.47049085759667697,
      "learning_rate": 3.867370395306068e-05,
      "loss": 0.713,
      "step": 50
    },
    {
      "epoch": 1.7719298245614035,
      "grad_norm": 0.41760981184783075,
      "learning_rate": 3.817556778933698e-05,
      "loss": 0.7196,
      "step": 51
    },
    {
      "epoch": 1.807017543859649,
      "grad_norm": 0.5161375525386056,
      "learning_rate": 3.7670080639366004e-05,
      "loss": 0.7425,
      "step": 52
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 0.43748999720890136,
      "learning_rate": 3.715752452735704e-05,
      "loss": 0.5942,
      "step": 53
    },
    {
      "epoch": 1.8771929824561404,
      "grad_norm": 0.5115131837341141,
      "learning_rate": 3.6638185421474084e-05,
      "loss": 0.7346,
      "step": 54
    },
    {
      "epoch": 1.912280701754386,
      "grad_norm": 0.461919359464356,
      "learning_rate": 3.61123530742869e-05,
      "loss": 0.6743,
      "step": 55
    },
    {
      "epoch": 1.9473684210526314,
      "grad_norm": 0.4342888779362966,
      "learning_rate": 3.5580320861110625e-05,
      "loss": 0.7281,
      "step": 56
    },
    {
      "epoch": 1.9824561403508771,
      "grad_norm": 0.48320797418001477,
      "learning_rate": 3.504238561632424e-05,
      "loss": 0.7326,
      "step": 57
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.8691230999307578,
      "learning_rate": 3.4498847467759e-05,
      "loss": 0.6139,
      "step": 58
    },
    {
      "epoch": 2.0350877192982457,
      "grad_norm": 0.925588727218562,
      "learning_rate": 3.3950009669249497e-05,
      "loss": 0.5316,
      "step": 59
    },
    {
      "epoch": 2.0701754385964914,
      "grad_norm": 2.7861744302281886,
      "learning_rate": 3.339617843144057e-05,
      "loss": 0.6395,
      "step": 60
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 2.960320751174846,
      "learning_rate": 3.2837662750944535e-05,
      "loss": 0.5571,
      "step": 61
    },
    {
      "epoch": 2.1403508771929824,
      "grad_norm": 0.5858200164045075,
      "learning_rate": 3.227477423794412e-05,
      "loss": 0.5282,
      "step": 62
    },
    {
      "epoch": 2.175438596491228,
      "grad_norm": 0.7249551445260521,
      "learning_rate": 3.170782694233712e-05,
      "loss": 0.568,
      "step": 63
    },
    {
      "epoch": 2.2105263157894735,
      "grad_norm": 0.8322811685066639,
      "learning_rate": 3.1137137178519985e-05,
      "loss": 0.5588,
      "step": 64
    },
    {
      "epoch": 2.245614035087719,
      "grad_norm": 0.4688506121750911,
      "learning_rate": 3.056302334890786e-05,
      "loss": 0.4997,
      "step": 65
    },
    {
      "epoch": 2.280701754385965,
      "grad_norm": 0.5395406531134912,
      "learning_rate": 2.9985805766289817e-05,
      "loss": 0.5389,
      "step": 66
    },
    {
      "epoch": 2.3157894736842106,
      "grad_norm": 0.5972426732470659,
      "learning_rate": 2.9405806475118048e-05,
      "loss": 0.5154,
      "step": 67
    },
    {
      "epoch": 2.3508771929824563,
      "grad_norm": 0.5435761853560287,
      "learning_rate": 2.882334907183115e-05,
      "loss": 0.5696,
      "step": 68
    },
    {
      "epoch": 2.3859649122807016,
      "grad_norm": 0.4733058013404621,
      "learning_rate": 2.8238758524311314e-05,
      "loss": 0.5286,
      "step": 69
    },
    {
      "epoch": 2.4210526315789473,
      "grad_norm": 0.7320896114370156,
      "learning_rate": 2.7652360990576453e-05,
      "loss": 0.6029,
      "step": 70
    },
    {
      "epoch": 2.456140350877193,
      "grad_norm": 0.5143853256014193,
      "learning_rate": 2.7064483636808313e-05,
      "loss": 0.5373,
      "step": 71
    },
    {
      "epoch": 2.4912280701754383,
      "grad_norm": 0.47578504969519847,
      "learning_rate": 2.6475454454818073e-05,
      "loss": 0.5372,
      "step": 72
    },
    {
      "epoch": 2.526315789473684,
      "grad_norm": 0.47485068019332843,
      "learning_rate": 2.5885602079051353e-05,
      "loss": 0.4714,
      "step": 73
    },
    {
      "epoch": 2.56140350877193,
      "grad_norm": 0.5454183913380498,
      "learning_rate": 2.529525560323462e-05,
      "loss": 0.559,
      "step": 74
    },
    {
      "epoch": 2.5964912280701755,
      "grad_norm": 0.4805059890683054,
      "learning_rate": 2.470474439676539e-05,
      "loss": 0.5432,
      "step": 75
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 0.549573444590586,
      "learning_rate": 2.4114397920948657e-05,
      "loss": 0.544,
      "step": 76
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.44171024079549115,
      "learning_rate": 2.3524545545181933e-05,
      "loss": 0.5719,
      "step": 77
    },
    {
      "epoch": 2.7017543859649122,
      "grad_norm": 0.40375286197219823,
      "learning_rate": 2.2935516363191693e-05,
      "loss": 0.5574,
      "step": 78
    },
    {
      "epoch": 2.736842105263158,
      "grad_norm": 0.44079765151450895,
      "learning_rate": 2.2347639009423553e-05,
      "loss": 0.5473,
      "step": 79
    },
    {
      "epoch": 2.7719298245614032,
      "grad_norm": 0.3837140844283907,
      "learning_rate": 2.1761241475688695e-05,
      "loss": 0.5,
      "step": 80
    },
    {
      "epoch": 2.807017543859649,
      "grad_norm": 0.36498294953041693,
      "learning_rate": 2.117665092816885e-05,
      "loss": 0.4579,
      "step": 81
    },
    {
      "epoch": 2.8421052631578947,
      "grad_norm": 0.4176708774075213,
      "learning_rate": 2.059419352488196e-05,
      "loss": 0.5302,
      "step": 82
    },
    {
      "epoch": 2.8771929824561404,
      "grad_norm": 0.3801878678793892,
      "learning_rate": 2.0014194233710193e-05,
      "loss": 0.5338,
      "step": 83
    },
    {
      "epoch": 2.912280701754386,
      "grad_norm": 0.46768889533074776,
      "learning_rate": 1.9436976651092144e-05,
      "loss": 0.5636,
      "step": 84
    },
    {
      "epoch": 2.9473684210526314,
      "grad_norm": 0.41361908756083027,
      "learning_rate": 1.8862862821480025e-05,
      "loss": 0.4871,
      "step": 85
    },
    {
      "epoch": 2.982456140350877,
      "grad_norm": 0.35900229029204755,
      "learning_rate": 1.829217305766289e-05,
      "loss": 0.545,
      "step": 86
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.35900229029204755,
      "learning_rate": 1.7725225762055887e-05,
      "loss": 0.3114,
      "step": 87
    },
    {
      "epoch": 3.0350877192982457,
      "grad_norm": 1.039088884520627,
      "learning_rate": 1.7162337249055477e-05,
      "loss": 0.4039,
      "step": 88
    },
    {
      "epoch": 3.0701754385964914,
      "grad_norm": 1.2225625714935646,
      "learning_rate": 1.6603821568559437e-05,
      "loss": 0.4477,
      "step": 89
    },
    {
      "epoch": 3.1052631578947367,
      "grad_norm": 0.517211355092857,
      "learning_rate": 1.604999033075051e-05,
      "loss": 0.4303,
      "step": 90
    },
    {
      "epoch": 3.1403508771929824,
      "grad_norm": 1.2522323591078175,
      "learning_rate": 1.5501152532241005e-05,
      "loss": 0.3948,
      "step": 91
    },
    {
      "epoch": 3.175438596491228,
      "grad_norm": 1.4369568509391157,
      "learning_rate": 1.495761438367577e-05,
      "loss": 0.3998,
      "step": 92
    },
    {
      "epoch": 3.2105263157894735,
      "grad_norm": 1.080289936134096,
      "learning_rate": 1.4419679138889378e-05,
      "loss": 0.4489,
      "step": 93
    },
    {
      "epoch": 3.245614035087719,
      "grad_norm": 0.3959735616331335,
      "learning_rate": 1.3887646925713116e-05,
      "loss": 0.3701,
      "step": 94
    },
    {
      "epoch": 3.280701754385965,
      "grad_norm": 0.6536961872630214,
      "learning_rate": 1.3361814578525922e-05,
      "loss": 0.3669,
      "step": 95
    },
    {
      "epoch": 3.3157894736842106,
      "grad_norm": 0.9236561795720727,
      "learning_rate": 1.2842475472642968e-05,
      "loss": 0.4101,
      "step": 96
    },
    {
      "epoch": 3.3508771929824563,
      "grad_norm": 0.624339506754103,
      "learning_rate": 1.2329919360634002e-05,
      "loss": 0.3797,
      "step": 97
    },
    {
      "epoch": 3.3859649122807016,
      "grad_norm": 0.4687946351643177,
      "learning_rate": 1.182443221066303e-05,
      "loss": 0.3414,
      "step": 98
    },
    {
      "epoch": 3.4210526315789473,
      "grad_norm": 0.47331039977421663,
      "learning_rate": 1.1326296046939333e-05,
      "loss": 0.3378,
      "step": 99
    },
    {
      "epoch": 3.456140350877193,
      "grad_norm": 0.4780339258304509,
      "learning_rate": 1.083578879236895e-05,
      "loss": 0.3148,
      "step": 100
    },
    {
      "epoch": 3.4912280701754383,
      "grad_norm": 0.472991860964808,
      "learning_rate": 1.0353184113494386e-05,
      "loss": 0.362,
      "step": 101
    },
    {
      "epoch": 3.526315789473684,
      "grad_norm": 0.5473896318173218,
      "learning_rate": 9.878751267809069e-06,
      "loss": 0.366,
      "step": 102
    },
    {
      "epoch": 3.56140350877193,
      "grad_norm": 0.49421552981457206,
      "learning_rate": 9.412754953531663e-06,
      "loss": 0.3753,
      "step": 103
    },
    {
      "epoch": 3.5964912280701755,
      "grad_norm": 0.41594310192216705,
      "learning_rate": 8.955455161924217e-06,
      "loss": 0.3392,
      "step": 104
    },
    {
      "epoch": 3.6315789473684212,
      "grad_norm": 0.3728949229584135,
      "learning_rate": 8.507107032236322e-06,
      "loss": 0.3265,
      "step": 105
    },
    {
      "epoch": 3.6666666666666665,
      "grad_norm": 0.4105187771371921,
      "learning_rate": 8.067960709356478e-06,
      "loss": 0.3411,
      "step": 106
    },
    {
      "epoch": 3.7017543859649122,
      "grad_norm": 0.4016073311581445,
      "learning_rate": 7.638261204249784e-06,
      "loss": 0.3637,
      "step": 107
    },
    {
      "epoch": 3.736842105263158,
      "grad_norm": 0.5286770481220647,
      "learning_rate": 7.218248257260127e-06,
      "loss": 0.4157,
      "step": 108
    },
    {
      "epoch": 3.7719298245614032,
      "grad_norm": 0.3890055642916449,
      "learning_rate": 6.8081562043528445e-06,
      "loss": 0.4434,
      "step": 109
    },
    {
      "epoch": 3.807017543859649,
      "grad_norm": 0.38635140940193147,
      "learning_rate": 6.40821384637276e-06,
      "loss": 0.367,
      "step": 110
    },
    {
      "epoch": 3.8421052631578947,
      "grad_norm": 0.37252947829119043,
      "learning_rate": 6.018644321390288e-06,
      "loss": 0.3714,
      "step": 111
    },
    {
      "epoch": 3.8771929824561404,
      "grad_norm": 0.3916205714633107,
      "learning_rate": 5.639664980207024e-06,
      "loss": 0.333,
      "step": 112
    },
    {
      "epoch": 3.912280701754386,
      "grad_norm": 0.39308963657720947,
      "learning_rate": 5.271487265090163e-06,
      "loss": 0.3425,
      "step": 113
    },
    {
      "epoch": 3.9473684210526314,
      "grad_norm": 0.34722098404157115,
      "learning_rate": 4.914316591803475e-06,
      "loss": 0.3638,
      "step": 114
    },
    {
      "epoch": 3.982456140350877,
      "grad_norm": 0.33647207931419637,
      "learning_rate": 4.56835223500055e-06,
      "loss": 0.3789,
      "step": 115
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.5236255764345163,
      "learning_rate": 4.23378721704443e-06,
      "loss": 0.2909,
      "step": 116
    }
  ],
  "logging_steps": 1,
  "max_steps": 140,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 52227047817216.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}