{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.5,
  "eval_steps": 37,
  "global_step": 219,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00684931506849315,
      "grad_norm": 6.781628733365755,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.5668,
      "step": 1
    },
    {
      "epoch": 0.00684931506849315,
      "eval_loss": 2.5806074142456055,
      "eval_runtime": 6.5651,
      "eval_samples_per_second": 47.524,
      "eval_steps_per_second": 3.046,
      "step": 1
    },
    {
      "epoch": 0.0136986301369863,
      "grad_norm": 6.86619348498488,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.6025,
      "step": 2
    },
    {
      "epoch": 0.02054794520547945,
      "grad_norm": 6.661187717958318,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.6316,
      "step": 3
    },
    {
      "epoch": 0.0273972602739726,
      "grad_norm": 6.861551727845244,
      "learning_rate": 8.000000000000001e-07,
      "loss": 2.5801,
      "step": 4
    },
    {
      "epoch": 0.03424657534246575,
      "grad_norm": 6.69117874797922,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 2.5335,
      "step": 5
    },
    {
      "epoch": 0.0410958904109589,
      "grad_norm": 5.875380376726272,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 2.5241,
      "step": 6
    },
    {
      "epoch": 0.04794520547945205,
      "grad_norm": 5.7978399687358255,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 2.4675,
      "step": 7
    },
    {
      "epoch": 0.0547945205479452,
      "grad_norm": 3.954765219007858,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 2.5811,
      "step": 8
    },
    {
      "epoch": 0.06164383561643835,
      "grad_norm": 3.8615688857265162,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 2.4886,
      "step": 9
    },
    {
      "epoch": 0.0684931506849315,
      "grad_norm": 3.1019873404806138,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.5337,
      "step": 10
    },
    {
      "epoch": 0.07534246575342465,
      "grad_norm": 1.653000190239802,
      "learning_rate": 2.2e-06,
      "loss": 2.493,
      "step": 11
    },
    {
      "epoch": 0.0821917808219178,
      "grad_norm": 1.6653405065701912,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 2.4862,
      "step": 12
    },
    {
      "epoch": 0.08904109589041095,
      "grad_norm": 1.9193066268748051,
      "learning_rate": 2.6e-06,
      "loss": 2.5172,
      "step": 13
    },
    {
      "epoch": 0.0958904109589041,
      "grad_norm": 1.4108595092412395,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 2.4411,
      "step": 14
    },
    {
      "epoch": 0.10273972602739725,
      "grad_norm": 2.0335186868451713,
      "learning_rate": 3e-06,
      "loss": 2.5062,
      "step": 15
    },
    {
      "epoch": 0.1095890410958904,
      "grad_norm": 1.9834579085161663,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 2.406,
      "step": 16
    },
    {
      "epoch": 0.11643835616438356,
      "grad_norm": 1.7772826080066895,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 2.4608,
      "step": 17
    },
    {
      "epoch": 0.1232876712328767,
      "grad_norm": 1.3962245609921142,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 2.3551,
      "step": 18
    },
    {
      "epoch": 0.13013698630136986,
      "grad_norm": 1.190039563451697,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 2.4188,
      "step": 19
    },
    {
      "epoch": 0.136986301369863,
      "grad_norm": 1.0109005845053234,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.4981,
      "step": 20
    },
    {
      "epoch": 0.14383561643835616,
      "grad_norm": 1.1671978967591745,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 2.319,
      "step": 21
    },
    {
      "epoch": 0.1506849315068493,
      "grad_norm": 1.0658877053683267,
      "learning_rate": 4.4e-06,
      "loss": 2.3067,
      "step": 22
    },
    {
      "epoch": 0.15753424657534246,
      "grad_norm": 1.0578186226484172,
      "learning_rate": 4.600000000000001e-06,
      "loss": 2.4644,
      "step": 23
    },
    {
      "epoch": 0.1643835616438356,
      "grad_norm": 0.9493987331518986,
      "learning_rate": 4.800000000000001e-06,
      "loss": 2.4179,
      "step": 24
    },
    {
      "epoch": 0.17123287671232876,
      "grad_norm": 0.8876721262354197,
      "learning_rate": 5e-06,
      "loss": 2.4548,
      "step": 25
    },
    {
      "epoch": 0.1780821917808219,
      "grad_norm": 0.9016648720831166,
      "learning_rate": 4.999960519285878e-06,
      "loss": 2.4203,
      "step": 26
    },
    {
      "epoch": 0.18493150684931506,
      "grad_norm": 0.8594911221513131,
      "learning_rate": 4.999842078390492e-06,
      "loss": 2.4445,
      "step": 27
    },
    {
      "epoch": 0.1917808219178082,
      "grad_norm": 0.8294185847621298,
      "learning_rate": 4.9996446810547464e-06,
      "loss": 2.4286,
      "step": 28
    },
    {
      "epoch": 0.19863013698630136,
      "grad_norm": 0.829981358086145,
      "learning_rate": 4.999368333513354e-06,
      "loss": 2.3941,
      "step": 29
    },
    {
      "epoch": 0.2054794520547945,
      "grad_norm": 0.8259390860771237,
      "learning_rate": 4.99901304449463e-06,
      "loss": 2.2873,
      "step": 30
    },
    {
      "epoch": 0.21232876712328766,
      "grad_norm": 0.7780493406506134,
      "learning_rate": 4.998578825220228e-06,
      "loss": 2.4414,
      "step": 31
    },
    {
      "epoch": 0.2191780821917808,
      "grad_norm": 0.8139739891431346,
      "learning_rate": 4.9980656894047776e-06,
      "loss": 2.426,
      "step": 32
    },
    {
      "epoch": 0.22602739726027396,
      "grad_norm": 0.7967892569249858,
      "learning_rate": 4.9974736532554525e-06,
      "loss": 2.436,
      "step": 33
    },
    {
      "epoch": 0.2328767123287671,
      "grad_norm": 0.7428657720222388,
      "learning_rate": 4.996802735471461e-06,
      "loss": 2.3185,
      "step": 34
    },
    {
      "epoch": 0.23972602739726026,
      "grad_norm": 0.7499845773485214,
      "learning_rate": 4.996052957243455e-06,
      "loss": 2.4402,
      "step": 35
    },
    {
      "epoch": 0.2465753424657534,
      "grad_norm": 0.7359931004513611,
      "learning_rate": 4.995224342252856e-06,
      "loss": 2.3618,
      "step": 36
    },
    {
      "epoch": 0.2534246575342466,
      "grad_norm": 0.7073950294074208,
      "learning_rate": 4.994316916671115e-06,
      "loss": 2.455,
      "step": 37
    },
    {
      "epoch": 0.2534246575342466,
      "eval_loss": 2.4449844360351562,
      "eval_runtime": 5.9369,
      "eval_samples_per_second": 52.553,
      "eval_steps_per_second": 3.369,
      "step": 37
    },
    {
      "epoch": 0.2602739726027397,
      "grad_norm": 0.6796375355430841,
      "learning_rate": 4.993330709158879e-06,
      "loss": 2.4065,
      "step": 38
    },
    {
      "epoch": 0.2671232876712329,
      "grad_norm": 0.7250510771173403,
      "learning_rate": 4.992265750865091e-06,
      "loss": 2.4527,
      "step": 39
    },
    {
      "epoch": 0.273972602739726,
      "grad_norm": 0.739133472992406,
      "learning_rate": 4.991122075426001e-06,
      "loss": 2.4787,
      "step": 40
    },
    {
      "epoch": 0.2808219178082192,
      "grad_norm": 0.7026481518003141,
      "learning_rate": 4.989899718964108e-06,
      "loss": 2.4464,
      "step": 41
    },
    {
      "epoch": 0.2876712328767123,
      "grad_norm": 0.7034188124515799,
      "learning_rate": 4.988598720087015e-06,
      "loss": 2.4366,
      "step": 42
    },
    {
      "epoch": 0.2945205479452055,
      "grad_norm": 0.7114177997700272,
      "learning_rate": 4.9872191198862166e-06,
      "loss": 2.3954,
      "step": 43
    },
    {
      "epoch": 0.3013698630136986,
      "grad_norm": 0.7489678406861063,
      "learning_rate": 4.985760961935791e-06,
      "loss": 2.4092,
      "step": 44
    },
    {
      "epoch": 0.3082191780821918,
      "grad_norm": 0.721624023056601,
      "learning_rate": 4.984224292291035e-06,
      "loss": 2.395,
      "step": 45
    },
    {
      "epoch": 0.3150684931506849,
      "grad_norm": 0.6976242893548902,
      "learning_rate": 4.982609159486998e-06,
      "loss": 2.3621,
      "step": 46
    },
    {
      "epoch": 0.3219178082191781,
      "grad_norm": 0.6979047705556364,
      "learning_rate": 4.980915614536957e-06,
      "loss": 2.3956,
      "step": 47
    },
    {
      "epoch": 0.3287671232876712,
      "grad_norm": 0.6638188971561942,
      "learning_rate": 4.979143710930805e-06,
      "loss": 2.4447,
      "step": 48
    },
    {
      "epoch": 0.3356164383561644,
      "grad_norm": 0.6550861221813643,
      "learning_rate": 4.977293504633357e-06,
      "loss": 2.3803,
      "step": 49
    },
    {
      "epoch": 0.3424657534246575,
      "grad_norm": 0.6637160251936499,
      "learning_rate": 4.975365054082586e-06,
      "loss": 2.5123,
      "step": 50
    },
    {
      "epoch": 0.3493150684931507,
      "grad_norm": 0.6657032864997552,
      "learning_rate": 4.973358420187776e-06,
      "loss": 2.4373,
      "step": 51
    },
    {
      "epoch": 0.3561643835616438,
      "grad_norm": 0.6671449801461397,
      "learning_rate": 4.971273666327598e-06,
      "loss": 2.4515,
      "step": 52
    },
    {
      "epoch": 0.363013698630137,
      "grad_norm": 0.6574174542409758,
      "learning_rate": 4.969110858348108e-06,
      "loss": 2.3797,
      "step": 53
    },
    {
      "epoch": 0.3698630136986301,
      "grad_norm": 0.7037780907575699,
      "learning_rate": 4.96687006456067e-06,
      "loss": 2.4465,
      "step": 54
    },
    {
      "epoch": 0.3767123287671233,
      "grad_norm": 0.7244746896553337,
      "learning_rate": 4.964551355739796e-06,
      "loss": 2.4265,
      "step": 55
    },
    {
      "epoch": 0.3835616438356164,
      "grad_norm": 0.6581463626850778,
      "learning_rate": 4.962154805120908e-06,
      "loss": 2.4168,
      "step": 56
    },
    {
      "epoch": 0.3904109589041096,
      "grad_norm": 0.6289298559777078,
      "learning_rate": 4.959680488398031e-06,
      "loss": 2.3328,
      "step": 57
    },
    {
      "epoch": 0.3972602739726027,
      "grad_norm": 0.724536921071452,
      "learning_rate": 4.957128483721398e-06,
      "loss": 2.4319,
      "step": 58
    },
    {
      "epoch": 0.4041095890410959,
      "grad_norm": 0.7119106876660016,
      "learning_rate": 4.9544988716949825e-06,
      "loss": 2.4339,
      "step": 59
    },
    {
      "epoch": 0.410958904109589,
      "grad_norm": 0.6561228716087687,
      "learning_rate": 4.951791735373953e-06,
      "loss": 2.4057,
      "step": 60
    },
    {
      "epoch": 0.4178082191780822,
      "grad_norm": 0.7138586667149742,
      "learning_rate": 4.949007160262049e-06,
      "loss": 2.3573,
      "step": 61
    },
    {
      "epoch": 0.4246575342465753,
      "grad_norm": 0.6884519352750037,
      "learning_rate": 4.946145234308884e-06,
      "loss": 2.3747,
      "step": 62
    },
    {
      "epoch": 0.4315068493150685,
      "grad_norm": 0.7403756155139569,
      "learning_rate": 4.943206047907159e-06,
      "loss": 2.2729,
      "step": 63
    },
    {
      "epoch": 0.4383561643835616,
      "grad_norm": 0.6656934708441304,
      "learning_rate": 4.940189693889819e-06,
      "loss": 2.3805,
      "step": 64
    },
    {
      "epoch": 0.4452054794520548,
      "grad_norm": 0.7251476112884715,
      "learning_rate": 4.937096267527111e-06,
      "loss": 2.3104,
      "step": 65
    },
    {
      "epoch": 0.4520547945205479,
      "grad_norm": 0.6921252166237588,
      "learning_rate": 4.9339258665235815e-06,
      "loss": 2.3499,
      "step": 66
    },
    {
      "epoch": 0.4589041095890411,
      "grad_norm": 0.685877730442195,
      "learning_rate": 4.930678591014986e-06,
      "loss": 2.4331,
      "step": 67
    },
    {
      "epoch": 0.4657534246575342,
      "grad_norm": 0.6641111783418014,
      "learning_rate": 4.927354543565131e-06,
      "loss": 2.3789,
      "step": 68
    },
    {
      "epoch": 0.4726027397260274,
      "grad_norm": 0.6657747916085803,
      "learning_rate": 4.9239538291626285e-06,
      "loss": 2.4542,
      "step": 69
    },
    {
      "epoch": 0.4794520547945205,
      "grad_norm": 0.6544468951826022,
      "learning_rate": 4.920476555217586e-06,
      "loss": 2.4277,
      "step": 70
    },
    {
      "epoch": 0.4863013698630137,
      "grad_norm": 0.6670629357129911,
      "learning_rate": 4.91692283155821e-06,
      "loss": 2.3881,
      "step": 71
    },
    {
      "epoch": 0.4931506849315068,
      "grad_norm": 0.6559422824562596,
      "learning_rate": 4.913292770427338e-06,
      "loss": 2.4225,
      "step": 72
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.6786459570539399,
      "learning_rate": 4.909586486478897e-06,
      "loss": 2.4963,
      "step": 73
    },
    {
      "epoch": 0.5068493150684932,
      "grad_norm": 0.6892419553245916,
      "learning_rate": 4.905804096774274e-06,
      "loss": 2.4115,
      "step": 74
    },
    {
      "epoch": 0.5068493150684932,
      "eval_loss": 2.4323267936706543,
      "eval_runtime": 6.0665,
      "eval_samples_per_second": 51.43,
      "eval_steps_per_second": 3.297,
      "step": 74
    },
    {
      "epoch": 0.5136986301369864,
      "grad_norm": 0.6768758807890537,
      "learning_rate": 4.901945720778627e-06,
      "loss": 2.3938,
      "step": 75
    },
    {
      "epoch": 0.5205479452054794,
      "grad_norm": 0.7339799079732157,
      "learning_rate": 4.898011480357109e-06,
      "loss": 2.2883,
      "step": 76
    },
    {
      "epoch": 0.5273972602739726,
      "grad_norm": 0.7397662592480156,
      "learning_rate": 4.894001499771015e-06,
      "loss": 2.4598,
      "step": 77
    },
    {
      "epoch": 0.5342465753424658,
      "grad_norm": 0.6819769884160725,
      "learning_rate": 4.889915905673865e-06,
      "loss": 2.4188,
      "step": 78
    },
    {
      "epoch": 0.541095890410959,
      "grad_norm": 0.6772539201070739,
      "learning_rate": 4.885754827107395e-06,
      "loss": 2.4142,
      "step": 79
    },
    {
      "epoch": 0.547945205479452,
      "grad_norm": 0.659763812226515,
      "learning_rate": 4.88151839549749e-06,
      "loss": 2.298,
      "step": 80
    },
    {
      "epoch": 0.5547945205479452,
      "grad_norm": 0.7286093280405468,
      "learning_rate": 4.877206744650024e-06,
      "loss": 2.3898,
      "step": 81
    },
    {
      "epoch": 0.5616438356164384,
      "grad_norm": 0.6986932306550346,
      "learning_rate": 4.8728200107466415e-06,
      "loss": 2.3598,
      "step": 82
    },
    {
      "epoch": 0.5684931506849316,
      "grad_norm": 0.6842367412456468,
      "learning_rate": 4.868358332340451e-06,
      "loss": 2.3763,
      "step": 83
    },
    {
      "epoch": 0.5753424657534246,
      "grad_norm": 0.6724304892107736,
      "learning_rate": 4.863821850351655e-06,
      "loss": 2.4028,
      "step": 84
    },
    {
      "epoch": 0.5821917808219178,
      "grad_norm": 0.6674281958373836,
      "learning_rate": 4.859210708063091e-06,
      "loss": 2.4226,
      "step": 85
    },
    {
      "epoch": 0.589041095890411,
      "grad_norm": 0.6761850508539816,
      "learning_rate": 4.854525051115711e-06,
      "loss": 2.2755,
      "step": 86
    },
    {
      "epoch": 0.5958904109589042,
      "grad_norm": 0.7049437160925219,
      "learning_rate": 4.8497650275039795e-06,
      "loss": 2.3633,
      "step": 87
    },
    {
      "epoch": 0.6027397260273972,
      "grad_norm": 0.6675473282240035,
      "learning_rate": 4.844930787571204e-06,
      "loss": 2.4161,
      "step": 88
    },
    {
      "epoch": 0.6095890410958904,
      "grad_norm": 0.6893380001896178,
      "learning_rate": 4.84002248400478e-06,
      "loss": 2.3261,
      "step": 89
    },
    {
      "epoch": 0.6164383561643836,
      "grad_norm": 0.6557076444481973,
      "learning_rate": 4.835040271831371e-06,
      "loss": 2.4113,
      "step": 90
    },
    {
      "epoch": 0.6232876712328768,
      "grad_norm": 0.7106326693649547,
      "learning_rate": 4.829984308412011e-06,
      "loss": 2.3468,
      "step": 91
    },
    {
      "epoch": 0.6301369863013698,
      "grad_norm": 0.6780886154634945,
      "learning_rate": 4.82485475343714e-06,
      "loss": 2.4227,
      "step": 92
    },
    {
      "epoch": 0.636986301369863,
      "grad_norm": 0.6848177542457299,
      "learning_rate": 4.819651768921552e-06,
      "loss": 2.4455,
      "step": 93
    },
    {
      "epoch": 0.6438356164383562,
      "grad_norm": 0.6537386212526299,
      "learning_rate": 4.814375519199281e-06,
      "loss": 2.359,
      "step": 94
    },
    {
      "epoch": 0.6506849315068494,
      "grad_norm": 0.692282068736651,
      "learning_rate": 4.809026170918414e-06,
      "loss": 2.4523,
      "step": 95
    },
    {
      "epoch": 0.6575342465753424,
      "grad_norm": 0.6623529936148479,
      "learning_rate": 4.803603893035822e-06,
      "loss": 2.4321,
      "step": 96
    },
    {
      "epoch": 0.6643835616438356,
      "grad_norm": 0.6842008308899095,
      "learning_rate": 4.798108856811828e-06,
      "loss": 2.415,
      "step": 97
    },
    {
      "epoch": 0.6712328767123288,
      "grad_norm": 0.6761407885852002,
      "learning_rate": 4.7925412358047965e-06,
      "loss": 2.4099,
      "step": 98
    },
    {
      "epoch": 0.678082191780822,
      "grad_norm": 0.695106737467069,
      "learning_rate": 4.786901205865647e-06,
      "loss": 2.3944,
      "step": 99
    },
    {
      "epoch": 0.684931506849315,
      "grad_norm": 0.7154498681645167,
      "learning_rate": 4.781188945132311e-06,
      "loss": 2.4261,
      "step": 100
    },
    {
      "epoch": 0.6917808219178082,
      "grad_norm": 0.708416595063437,
      "learning_rate": 4.775404634024093e-06,
      "loss": 2.4236,
      "step": 101
    },
    {
      "epoch": 0.6986301369863014,
      "grad_norm": 0.6561586213988884,
      "learning_rate": 4.769548455235979e-06,
      "loss": 2.2712,
      "step": 102
    },
    {
      "epoch": 0.7054794520547946,
      "grad_norm": 0.710907523788328,
      "learning_rate": 4.763620593732867e-06,
      "loss": 2.3401,
      "step": 103
    },
    {
      "epoch": 0.7123287671232876,
      "grad_norm": 0.6765968692107874,
      "learning_rate": 4.75762123674372e-06,
      "loss": 2.4381,
      "step": 104
    },
    {
      "epoch": 0.7191780821917808,
      "grad_norm": 0.9946667705374828,
      "learning_rate": 4.751550573755658e-06,
      "loss": 2.4825,
      "step": 105
    },
    {
      "epoch": 0.726027397260274,
      "grad_norm": 0.6808703038492471,
      "learning_rate": 4.745408796507968e-06,
      "loss": 2.4417,
      "step": 106
    },
    {
      "epoch": 0.7328767123287672,
      "grad_norm": 0.6511402815615133,
      "learning_rate": 4.73919609898605e-06,
      "loss": 2.412,
      "step": 107
    },
    {
      "epoch": 0.7397260273972602,
      "grad_norm": 0.6806168774586489,
      "learning_rate": 4.7329126774152945e-06,
      "loss": 2.4047,
      "step": 108
    },
    {
      "epoch": 0.7465753424657534,
      "grad_norm": 0.6581235881106364,
      "learning_rate": 4.726558730254876e-06,
      "loss": 2.3624,
      "step": 109
    },
    {
      "epoch": 0.7534246575342466,
      "grad_norm": 0.6858279965463718,
      "learning_rate": 4.720134458191494e-06,
      "loss": 2.4046,
      "step": 110
    },
    {
      "epoch": 0.7602739726027398,
      "grad_norm": 0.7044328584380498,
      "learning_rate": 4.7136400641330245e-06,
      "loss": 2.3298,
      "step": 111
    },
    {
      "epoch": 0.7602739726027398,
      "eval_loss": 2.422318935394287,
      "eval_runtime": 6.1327,
      "eval_samples_per_second": 50.874,
      "eval_steps_per_second": 3.261,
      "step": 111
    },
    {
      "epoch": 0.7671232876712328,
      "grad_norm": 0.6949341182078216,
      "learning_rate": 4.707075753202123e-06,
      "loss": 2.4219,
      "step": 112
    },
    {
      "epoch": 0.773972602739726,
      "grad_norm": 0.6883254066631505,
      "learning_rate": 4.700441732729733e-06,
      "loss": 2.4841,
      "step": 113
    },
    {
      "epoch": 0.7808219178082192,
      "grad_norm": 0.688766337379056,
      "learning_rate": 4.693738212248549e-06,
      "loss": 2.3722,
      "step": 114
    },
    {
      "epoch": 0.7876712328767124,
      "grad_norm": 0.6887580535922757,
      "learning_rate": 4.68696540348639e-06,
      "loss": 2.3637,
      "step": 115
    },
    {
      "epoch": 0.7945205479452054,
      "grad_norm": 0.6719005239018524,
      "learning_rate": 4.68012352035952e-06,
      "loss": 2.4465,
      "step": 116
    },
    {
      "epoch": 0.8013698630136986,
      "grad_norm": 0.6327678432767432,
      "learning_rate": 4.673212778965881e-06,
      "loss": 2.4219,
      "step": 117
    },
    {
      "epoch": 0.8082191780821918,
      "grad_norm": 0.6572338268581427,
      "learning_rate": 4.66623339757828e-06,
      "loss": 2.4384,
      "step": 118
    },
    {
      "epoch": 0.815068493150685,
      "grad_norm": 0.6774201762455547,
      "learning_rate": 4.659185596637485e-06,
      "loss": 2.4289,
      "step": 119
    },
    {
      "epoch": 0.821917808219178,
      "grad_norm": 0.6765522988548955,
      "learning_rate": 4.652069598745267e-06,
      "loss": 2.3849,
      "step": 120
    },
    {
      "epoch": 0.8287671232876712,
      "grad_norm": 0.6553606050310139,
      "learning_rate": 4.644885628657369e-06,
      "loss": 2.3953,
      "step": 121
    },
    {
      "epoch": 0.8356164383561644,
      "grad_norm": 0.6817645269442179,
      "learning_rate": 4.637633913276406e-06,
      "loss": 2.4534,
      "step": 122
    },
    {
      "epoch": 0.8424657534246576,
      "grad_norm": 0.6842353980823837,
      "learning_rate": 4.630314681644701e-06,
      "loss": 2.3376,
      "step": 123
    },
    {
      "epoch": 0.8493150684931506,
      "grad_norm": 0.6938393939968492,
      "learning_rate": 4.622928164937046e-06,
      "loss": 2.3795,
      "step": 124
    },
    {
      "epoch": 0.8561643835616438,
      "grad_norm": 0.6503903813017006,
      "learning_rate": 4.615474596453406e-06,
      "loss": 2.3801,
      "step": 125
    },
    {
      "epoch": 0.863013698630137,
      "grad_norm": 0.6690065166974256,
      "learning_rate": 4.607954211611543e-06,
      "loss": 2.4771,
      "step": 126
    },
    {
      "epoch": 0.8698630136986302,
      "grad_norm": 0.6565369111054283,
      "learning_rate": 4.600367247939592e-06,
      "loss": 2.4468,
      "step": 127
    },
    {
      "epoch": 0.8767123287671232,
      "grad_norm": 0.6590600861747526,
      "learning_rate": 4.5927139450685455e-06,
      "loss": 2.2973,
      "step": 128
    },
    {
      "epoch": 0.8835616438356164,
      "grad_norm": 0.6642768664806619,
      "learning_rate": 4.584994544724695e-06,
      "loss": 2.4463,
      "step": 129
    },
    {
      "epoch": 0.8904109589041096,
      "grad_norm": 0.7254289901639452,
      "learning_rate": 4.577209290721991e-06,
      "loss": 2.2925,
      "step": 130
    },
    {
      "epoch": 0.8972602739726028,
      "grad_norm": 0.6264601593599859,
      "learning_rate": 4.569358428954343e-06,
      "loss": 2.3624,
      "step": 131
    },
    {
      "epoch": 0.9041095890410958,
      "grad_norm": 0.6932555262208767,
      "learning_rate": 4.561442207387854e-06,
      "loss": 2.3315,
      "step": 132
    },
    {
      "epoch": 0.910958904109589,
      "grad_norm": 0.6857220601161248,
      "learning_rate": 4.55346087605299e-06,
      "loss": 2.372,
      "step": 133
    },
    {
      "epoch": 0.9178082191780822,
      "grad_norm": 0.701001912101975,
      "learning_rate": 4.5454146870366775e-06,
      "loss": 2.4026,
      "step": 134
    },
    {
      "epoch": 0.9246575342465754,
      "grad_norm": 0.6340272298295848,
      "learning_rate": 4.537303894474349e-06,
      "loss": 2.4131,
      "step": 135
    },
    {
      "epoch": 0.9315068493150684,
      "grad_norm": 0.6136778043717291,
      "learning_rate": 4.529128754541909e-06,
      "loss": 2.297,
      "step": 136
    },
    {
      "epoch": 0.9383561643835616,
      "grad_norm": 0.6322021005066626,
      "learning_rate": 4.5208895254476495e-06,
      "loss": 2.4084,
      "step": 137
    },
    {
      "epoch": 0.9452054794520548,
      "grad_norm": 0.6445887057946764,
      "learning_rate": 4.512586467424087e-06,
      "loss": 2.3645,
      "step": 138
    },
    {
      "epoch": 0.952054794520548,
      "grad_norm": 0.7062860383104262,
      "learning_rate": 4.504219842719752e-06,
      "loss": 2.3655,
      "step": 139
    },
    {
      "epoch": 0.958904109589041,
      "grad_norm": 0.6781212322165483,
      "learning_rate": 4.4957899155908954e-06,
      "loss": 2.4285,
      "step": 140
    },
    {
      "epoch": 0.9657534246575342,
      "grad_norm": 0.6678298874200482,
      "learning_rate": 4.487296952293156e-06,
      "loss": 2.38,
      "step": 141
    },
    {
      "epoch": 0.9726027397260274,
      "grad_norm": 0.6575931921755482,
      "learning_rate": 4.478741221073136e-06,
      "loss": 2.4094,
      "step": 142
    },
    {
      "epoch": 0.9794520547945206,
      "grad_norm": 0.6868011511324458,
      "learning_rate": 4.470122992159938e-06,
      "loss": 2.4096,
      "step": 143
    },
    {
      "epoch": 0.9863013698630136,
      "grad_norm": 0.6854773154104912,
      "learning_rate": 4.461442537756629e-06,
      "loss": 2.4249,
      "step": 144
    },
    {
      "epoch": 0.9931506849315068,
      "grad_norm": 0.708510432833667,
      "learning_rate": 4.452700132031639e-06,
      "loss": 2.3724,
      "step": 145
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.6566297487438831,
      "learning_rate": 4.443896051110105e-06,
      "loss": 2.391,
      "step": 146
    },
    {
      "epoch": 1.0068493150684932,
      "grad_norm": 0.7629369425964684,
      "learning_rate": 4.435030573065148e-06,
      "loss": 2.3448,
      "step": 147
    },
    {
      "epoch": 1.0136986301369864,
      "grad_norm": 0.7384812144528767,
      "learning_rate": 4.426103977909094e-06,
      "loss": 2.322,
      "step": 148
    },
    {
      "epoch": 1.0136986301369864,
      "eval_loss": 2.417771339416504,
      "eval_runtime": 53.3919,
      "eval_samples_per_second": 5.844,
      "eval_steps_per_second": 0.375,
      "step": 148
    },
    {
      "epoch": 1.0205479452054795,
      "grad_norm": 0.6662500395219653,
      "learning_rate": 4.417116547584621e-06,
      "loss": 2.35,
      "step": 149
    },
    {
      "epoch": 1.0273972602739727,
      "grad_norm": 0.6789888617564587,
      "learning_rate": 4.408068565955864e-06,
      "loss": 2.3845,
      "step": 150
    },
    {
      "epoch": 1.0342465753424657,
      "grad_norm": 0.6998134909721523,
      "learning_rate": 4.398960318799446e-06,
      "loss": 2.35,
      "step": 151
    },
    {
      "epoch": 1.0410958904109588,
      "grad_norm": 0.7216093013270933,
      "learning_rate": 4.389792093795444e-06,
      "loss": 2.3223,
      "step": 152
    },
    {
      "epoch": 1.047945205479452,
      "grad_norm": 0.7403493735812074,
      "learning_rate": 4.380564180518318e-06,
      "loss": 2.3253,
      "step": 153
    },
    {
      "epoch": 1.0547945205479452,
      "grad_norm": 0.7218865625666383,
      "learning_rate": 4.3712768704277535e-06,
      "loss": 2.3106,
      "step": 154
    },
    {
      "epoch": 1.0616438356164384,
      "grad_norm": 0.7197974746780744,
      "learning_rate": 4.361930456859455e-06,
      "loss": 2.3033,
      "step": 155
    },
    {
      "epoch": 1.0684931506849316,
      "grad_norm": 0.7249006057182749,
      "learning_rate": 4.35252523501589e-06,
      "loss": 2.3888,
      "step": 156
    },
    {
      "epoch": 1.0753424657534247,
      "grad_norm": 0.7037919961575326,
      "learning_rate": 4.343061501956959e-06,
      "loss": 2.3402,
      "step": 157
    },
    {
      "epoch": 1.0821917808219177,
      "grad_norm": 0.6996471940866854,
      "learning_rate": 4.3335395565906115e-06,
      "loss": 2.3194,
      "step": 158
    },
    {
      "epoch": 1.0890410958904109,
      "grad_norm": 0.717826362193492,
      "learning_rate": 4.323959699663412e-06,
      "loss": 2.3852,
      "step": 159
    },
    {
      "epoch": 1.095890410958904,
      "grad_norm": 0.708765230517044,
      "learning_rate": 4.314322233751034e-06,
      "loss": 2.3206,
      "step": 160
    },
    {
      "epoch": 1.1027397260273972,
      "grad_norm": 0.6776217729604187,
      "learning_rate": 4.304627463248706e-06,
      "loss": 2.3081,
      "step": 161
    },
    {
      "epoch": 1.1095890410958904,
      "grad_norm": 0.6853081994090057,
      "learning_rate": 4.294875694361599e-06,
      "loss": 2.3176,
      "step": 162
    },
    {
      "epoch": 1.1164383561643836,
      "grad_norm": 0.6830768891525606,
      "learning_rate": 4.285067235095152e-06,
      "loss": 2.3364,
      "step": 163
    },
    {
      "epoch": 1.1232876712328768,
      "grad_norm": 0.7089445206356011,
      "learning_rate": 4.275202395245346e-06,
      "loss": 2.3418,
      "step": 164
    },
    {
      "epoch": 1.13013698630137,
      "grad_norm": 0.7823709313759815,
      "learning_rate": 4.26528148638892e-06,
      "loss": 2.3798,
      "step": 165
    },
    {
      "epoch": 1.1369863013698631,
      "grad_norm": 0.6709932434280309,
      "learning_rate": 4.255304821873526e-06,
      "loss": 2.2267,
      "step": 166
    },
    {
      "epoch": 1.143835616438356,
      "grad_norm": 0.6628107638339615,
      "learning_rate": 4.245272716807834e-06,
      "loss": 2.3308,
      "step": 167
    },
    {
      "epoch": 1.1506849315068493,
      "grad_norm": 0.7184114487855815,
      "learning_rate": 4.2351854880515856e-06,
      "loss": 2.3017,
      "step": 168
    },
    {
      "epoch": 1.1575342465753424,
      "grad_norm": 0.7166834764618115,
      "learning_rate": 4.225043454205573e-06,
      "loss": 2.3551,
      "step": 169
    },
    {
      "epoch": 1.1643835616438356,
      "grad_norm": 0.7234116110718222,
      "learning_rate": 4.2148469356015895e-06,
      "loss": 2.3205,
      "step": 170
    },
    {
      "epoch": 1.1712328767123288,
      "grad_norm": 0.6873414003582445,
      "learning_rate": 4.204596254292303e-06,
      "loss": 2.3675,
      "step": 171
    },
    {
      "epoch": 1.178082191780822,
      "grad_norm": 0.7161572137012578,
      "learning_rate": 4.194291734041088e-06,
      "loss": 2.2708,
      "step": 172
    },
    {
      "epoch": 1.1849315068493151,
      "grad_norm": 0.7142712637206968,
      "learning_rate": 4.183933700311801e-06,
      "loss": 2.2617,
      "step": 173
    },
    {
      "epoch": 1.191780821917808,
      "grad_norm": 0.698584695168926,
      "learning_rate": 4.173522480258494e-06,
      "loss": 2.3686,
      "step": 174
    },
    {
      "epoch": 1.1986301369863013,
      "grad_norm": 0.7275575279831493,
      "learning_rate": 4.163058402715092e-06,
      "loss": 2.3088,
      "step": 175
    },
    {
      "epoch": 1.2054794520547945,
      "grad_norm": 0.7066892597231554,
      "learning_rate": 4.152541798184995e-06,
      "loss": 2.3273,
      "step": 176
    },
    {
      "epoch": 1.2123287671232876,
      "grad_norm": 0.6692377666271583,
      "learning_rate": 4.141972998830651e-06,
      "loss": 2.368,
      "step": 177
    },
    {
      "epoch": 1.2191780821917808,
      "grad_norm": 0.7519738253706634,
      "learning_rate": 4.1313523384630565e-06,
      "loss": 2.2878,
      "step": 178
    },
    {
      "epoch": 1.226027397260274,
      "grad_norm": 0.7029718569317863,
      "learning_rate": 4.120680152531214e-06,
      "loss": 2.2833,
      "step": 179
    },
    {
      "epoch": 1.2328767123287672,
      "grad_norm": 0.7334026090190444,
      "learning_rate": 4.109956778111544e-06,
      "loss": 2.3322,
      "step": 180
    },
    {
      "epoch": 1.2397260273972603,
      "grad_norm": 0.6951014303600972,
      "learning_rate": 4.099182553897228e-06,
      "loss": 2.2781,
      "step": 181
    },
    {
      "epoch": 1.2465753424657535,
      "grad_norm": 0.6885637550568241,
      "learning_rate": 4.088357820187521e-06,
      "loss": 2.2814,
      "step": 182
    },
    {
      "epoch": 1.2534246575342465,
      "grad_norm": 0.6917922559068629,
      "learning_rate": 4.077482918876995e-06,
      "loss": 2.378,
      "step": 183
    },
    {
      "epoch": 1.2602739726027397,
      "grad_norm": 0.6912354421485389,
      "learning_rate": 4.066558193444746e-06,
      "loss": 2.3087,
      "step": 184
    },
    {
      "epoch": 1.2671232876712328,
      "grad_norm": 0.7423647896303166,
      "learning_rate": 4.0555839889435444e-06,
      "loss": 2.2661,
      "step": 185
    },
    {
      "epoch": 1.2671232876712328,
      "eval_loss": 2.4178218841552734,
      "eval_runtime": 5.9059,
      "eval_samples_per_second": 52.828,
      "eval_steps_per_second": 3.386,
      "step": 185
    },
    {
      "epoch": 1.273972602739726,
      "grad_norm": 0.7478496634954042,
      "learning_rate": 4.044560651988933e-06,
      "loss": 2.3361,
      "step": 186
    },
    {
      "epoch": 1.2808219178082192,
      "grad_norm": 0.7065399264251201,
      "learning_rate": 4.033488530748285e-06,
      "loss": 2.2644,
      "step": 187
    },
    {
      "epoch": 1.2876712328767124,
      "grad_norm": 0.726480536340863,
      "learning_rate": 4.022367974929803e-06,
      "loss": 2.1882,
      "step": 188
    },
    {
      "epoch": 1.2945205479452055,
      "grad_norm": 0.6827946001097096,
      "learning_rate": 4.0111993357714755e-06,
      "loss": 2.2887,
      "step": 189
    },
    {
      "epoch": 1.3013698630136985,
      "grad_norm": 0.6814103107109724,
      "learning_rate": 3.999982966029981e-06,
      "loss": 2.3315,
      "step": 190
    },
    {
      "epoch": 1.308219178082192,
      "grad_norm": 0.65846390651437,
      "learning_rate": 3.98871921996955e-06,
      "loss": 2.2595,
      "step": 191
    },
    {
      "epoch": 1.3150684931506849,
      "grad_norm": 0.6664034822825207,
      "learning_rate": 3.977408453350774e-06,
      "loss": 2.3524,
      "step": 192
    },
    {
      "epoch": 1.321917808219178,
      "grad_norm": 0.7146251642801748,
      "learning_rate": 3.966051023419366e-06,
      "loss": 2.3562,
      "step": 193
    },
    {
      "epoch": 1.3287671232876712,
      "grad_norm": 0.7013378563213172,
      "learning_rate": 3.9546472888948825e-06,
      "loss": 2.2865,
      "step": 194
    },
    {
      "epoch": 1.3356164383561644,
      "grad_norm": 0.6884022789569789,
      "learning_rate": 3.943197609959389e-06,
      "loss": 2.3293,
      "step": 195
    },
    {
      "epoch": 1.3424657534246576,
      "grad_norm": 0.7263022758504937,
      "learning_rate": 3.931702348246087e-06,
      "loss": 2.2485,
      "step": 196
    },
    {
      "epoch": 1.3493150684931507,
      "grad_norm": 0.6879494874948079,
      "learning_rate": 3.92016186682789e-06,
      "loss": 2.2652,
      "step": 197
    },
    {
      "epoch": 1.356164383561644,
      "grad_norm": 0.7035221618072612,
      "learning_rate": 3.9085765302059556e-06,
      "loss": 2.3413,
      "step": 198
    },
    {
      "epoch": 1.3630136986301369,
      "grad_norm": 0.7042222964511125,
      "learning_rate": 3.8969467042981725e-06,
      "loss": 2.3202,
      "step": 199
    },
    {
      "epoch": 1.36986301369863,
      "grad_norm": 0.6961599428087603,
      "learning_rate": 3.885272756427609e-06,
      "loss": 2.2134,
      "step": 200
    },
    {
      "epoch": 1.3767123287671232,
      "grad_norm": 0.7445990308351226,
      "learning_rate": 3.873555055310902e-06,
      "loss": 2.3083,
      "step": 201
    },
    {
      "epoch": 1.3835616438356164,
      "grad_norm": 0.7272556327744341,
      "learning_rate": 3.86179397104662e-06,
      "loss": 2.2611,
      "step": 202
    },
    {
      "epoch": 1.3904109589041096,
      "grad_norm": 0.6734004584059651,
      "learning_rate": 3.849989875103566e-06,
      "loss": 2.3396,
      "step": 203
    },
    {
      "epoch": 1.3972602739726028,
      "grad_norm": 0.702149629138006,
      "learning_rate": 3.83814314030905e-06,
      "loss": 2.2206,
      "step": 204
    },
    {
      "epoch": 1.404109589041096,
      "grad_norm": 0.7668660946577405,
      "learning_rate": 3.826254140837111e-06,
      "loss": 2.3745,
      "step": 205
    },
    {
      "epoch": 1.410958904109589,
      "grad_norm": 0.6972264397617832,
      "learning_rate": 3.8143232521967023e-06,
      "loss": 2.2376,
      "step": 206
    },
    {
      "epoch": 1.4178082191780823,
      "grad_norm": 0.6802719750908277,
      "learning_rate": 3.802350851219826e-06,
      "loss": 2.3348,
      "step": 207
    },
    {
      "epoch": 1.4246575342465753,
      "grad_norm": 0.679939282001721,
      "learning_rate": 3.7903373160496342e-06,
      "loss": 2.3184,
      "step": 208
    },
    {
      "epoch": 1.4315068493150684,
      "grad_norm": 0.708335225525305,
      "learning_rate": 3.778283026128485e-06,
      "loss": 2.2065,
      "step": 209
    },
    {
      "epoch": 1.4383561643835616,
      "grad_norm": 0.6821337057973484,
      "learning_rate": 3.7661883621859584e-06,
      "loss": 2.3815,
      "step": 210
    },
    {
      "epoch": 1.4452054794520548,
      "grad_norm": 0.6864170898125791,
      "learning_rate": 3.754053706226829e-06,
      "loss": 2.2901,
      "step": 211
    },
    {
      "epoch": 1.452054794520548,
      "grad_norm": 0.690989555098226,
      "learning_rate": 3.741879441519004e-06,
      "loss": 2.3053,
      "step": 212
    },
    {
      "epoch": 1.4589041095890412,
      "grad_norm": 0.7326752735961912,
      "learning_rate": 3.729665952581415e-06,
      "loss": 2.2915,
      "step": 213
    },
    {
      "epoch": 1.4657534246575343,
      "grad_norm": 0.6795313294527453,
      "learning_rate": 3.7174136251718735e-06,
      "loss": 2.3548,
      "step": 214
    },
    {
      "epoch": 1.4726027397260273,
      "grad_norm": 0.7026030269068815,
      "learning_rate": 3.705122846274889e-06,
      "loss": 2.339,
      "step": 215
    },
    {
      "epoch": 1.4794520547945205,
      "grad_norm": 0.6968149329541087,
      "learning_rate": 3.6927940040894427e-06,
      "loss": 2.324,
      "step": 216
    },
    {
      "epoch": 1.4863013698630136,
      "grad_norm": 0.681949385138759,
      "learning_rate": 3.680427488016731e-06,
      "loss": 2.2522,
      "step": 217
    },
    {
      "epoch": 1.4931506849315068,
      "grad_norm": 0.7101393810809886,
      "learning_rate": 3.6680236886478627e-06,
      "loss": 2.3273,
      "step": 218
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.681265285699898,
      "learning_rate": 3.6555829977515213e-06,
      "loss": 2.3807,
      "step": 219
    }
  ],
"logging_steps": 1, |
|
"max_steps": 584, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 73, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 137562433781760.0, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|