|
{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.1005656819610308,
  "eval_steps": 160,
  "global_step": 160,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006285355122564425,
      "grad_norm": 2.96633243560791,
      "learning_rate": 0.0,
      "loss": 7.5468,
      "step": 1
    },
    {
      "epoch": 0.0006285355122564425,
      "eval_loss": 7.076071739196777,
      "eval_runtime": 1556.6109,
      "eval_samples_per_second": 1.656,
      "eval_steps_per_second": 1.656,
      "step": 1
    },
    {
      "epoch": 0.001257071024512885,
      "grad_norm": 2.7123501300811768,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 6.3309,
      "step": 2
    },
    {
      "epoch": 0.0018856065367693275,
      "grad_norm": 2.4243223667144775,
      "learning_rate": 8.000000000000001e-07,
      "loss": 6.7763,
      "step": 3
    },
    {
      "epoch": 0.00251414204902577,
      "grad_norm": 2.319216728210449,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 6.8187,
      "step": 4
    },
    {
      "epoch": 0.0031426775612822125,
      "grad_norm": 2.4935989379882812,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 7.0526,
      "step": 5
    },
    {
      "epoch": 0.003771213073538655,
      "grad_norm": 2.1718926429748535,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 6.7646,
      "step": 6
    },
    {
      "epoch": 0.0043997485857950975,
      "grad_norm": 2.177217960357666,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 6.6131,
      "step": 7
    },
    {
      "epoch": 0.00502828409805154,
      "grad_norm": 2.9915874004364014,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 7.6544,
      "step": 8
    },
    {
      "epoch": 0.0056568196103079825,
      "grad_norm": 2.6981074810028076,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 6.799,
      "step": 9
    },
    {
      "epoch": 0.006285355122564425,
      "grad_norm": 4.512426376342773,
      "learning_rate": 3.6e-06,
      "loss": 8.3895,
      "step": 10
    },
    {
      "epoch": 0.0069138906348208675,
      "grad_norm": 2.3968913555145264,
      "learning_rate": 4.000000000000001e-06,
      "loss": 6.3478,
      "step": 11
    },
    {
      "epoch": 0.00754242614707731,
      "grad_norm": 1.9744027853012085,
      "learning_rate": 4.4e-06,
      "loss": 5.5536,
      "step": 12
    },
    {
      "epoch": 0.008170961659333752,
      "grad_norm": 3.050507068634033,
      "learning_rate": 4.800000000000001e-06,
      "loss": 6.5491,
      "step": 13
    },
    {
      "epoch": 0.008799497171590195,
      "grad_norm": 3.0034799575805664,
      "learning_rate": 5.2e-06,
      "loss": 7.2764,
      "step": 14
    },
    {
      "epoch": 0.009428032683846637,
      "grad_norm": 2.323613405227661,
      "learning_rate": 5.600000000000001e-06,
      "loss": 6.8805,
      "step": 15
    },
    {
      "epoch": 0.01005656819610308,
      "grad_norm": 3.129593849182129,
      "learning_rate": 6e-06,
      "loss": 7.3171,
      "step": 16
    },
    {
      "epoch": 0.010685103708359522,
      "grad_norm": 2.296137571334839,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 6.4251,
      "step": 17
    },
    {
      "epoch": 0.011313639220615965,
      "grad_norm": 2.637282133102417,
      "learning_rate": 6.800000000000001e-06,
      "loss": 6.6142,
      "step": 18
    },
    {
      "epoch": 0.011942174732872407,
      "grad_norm": 2.1313271522521973,
      "learning_rate": 7.2e-06,
      "loss": 6.3841,
      "step": 19
    },
    {
      "epoch": 0.01257071024512885,
      "grad_norm": 2.7492284774780273,
      "learning_rate": 7.6e-06,
      "loss": 6.8148,
      "step": 20
    },
    {
      "epoch": 0.013199245757385292,
      "grad_norm": 2.945878267288208,
      "learning_rate": 8.000000000000001e-06,
      "loss": 6.9732,
      "step": 21
    },
    {
      "epoch": 0.013827781269641735,
      "grad_norm": 3.709951162338257,
      "learning_rate": 8.400000000000001e-06,
      "loss": 7.7273,
      "step": 22
    },
    {
      "epoch": 0.014456316781898177,
      "grad_norm": 3.023289203643799,
      "learning_rate": 8.8e-06,
      "loss": 7.0649,
      "step": 23
    },
    {
      "epoch": 0.01508485229415462,
      "grad_norm": 3.163715124130249,
      "learning_rate": 9.2e-06,
      "loss": 6.9342,
      "step": 24
    },
    {
      "epoch": 0.01571338780641106,
      "grad_norm": 4.114445686340332,
      "learning_rate": 9.600000000000001e-06,
      "loss": 8.119,
      "step": 25
    },
    {
      "epoch": 0.016341923318667503,
      "grad_norm": 3.021068572998047,
      "learning_rate": 1e-05,
      "loss": 7.4067,
      "step": 26
    },
    {
      "epoch": 0.01697045883092395,
      "grad_norm": 3.724407911300659,
      "learning_rate": 1.04e-05,
      "loss": 7.3869,
      "step": 27
    },
    {
      "epoch": 0.01759899434318039,
      "grad_norm": 2.656257390975952,
      "learning_rate": 1.08e-05,
      "loss": 5.9961,
      "step": 28
    },
    {
      "epoch": 0.018227529855436832,
      "grad_norm": 2.7785143852233887,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 5.6596,
      "step": 29
    },
    {
      "epoch": 0.018856065367693273,
      "grad_norm": 3.130934715270996,
      "learning_rate": 1.16e-05,
      "loss": 6.6092,
      "step": 30
    },
    {
      "epoch": 0.01948460087994972,
      "grad_norm": 3.42301869392395,
      "learning_rate": 1.2e-05,
      "loss": 6.3373,
      "step": 31
    },
    {
      "epoch": 0.02011313639220616,
      "grad_norm": 2.8691611289978027,
      "learning_rate": 1.24e-05,
      "loss": 6.5923,
      "step": 32
    },
    {
      "epoch": 0.020741671904462602,
      "grad_norm": 2.917086601257324,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 5.9773,
      "step": 33
    },
    {
      "epoch": 0.021370207416719043,
      "grad_norm": 4.07196044921875,
      "learning_rate": 1.32e-05,
      "loss": 7.4423,
      "step": 34
    },
    {
      "epoch": 0.02199874292897549,
      "grad_norm": 4.738312244415283,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 7.5113,
      "step": 35
    },
    {
      "epoch": 0.02262727844123193,
      "grad_norm": 3.898664712905884,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 6.8738,
      "step": 36
    },
    {
      "epoch": 0.023255813953488372,
      "grad_norm": 3.7448792457580566,
      "learning_rate": 1.44e-05,
      "loss": 6.9615,
      "step": 37
    },
    {
      "epoch": 0.023884349465744813,
      "grad_norm": 3.5938379764556885,
      "learning_rate": 1.48e-05,
      "loss": 6.0651,
      "step": 38
    },
    {
      "epoch": 0.02451288497800126,
      "grad_norm": 4.253636360168457,
      "learning_rate": 1.52e-05,
      "loss": 6.9986,
      "step": 39
    },
    {
      "epoch": 0.0251414204902577,
      "grad_norm": 4.985451698303223,
      "learning_rate": 1.56e-05,
      "loss": 7.252,
      "step": 40
    },
    {
      "epoch": 0.025769956002514142,
      "grad_norm": 4.376275062561035,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 6.8091,
      "step": 41
    },
    {
      "epoch": 0.026398491514770583,
      "grad_norm": 4.697645664215088,
      "learning_rate": 1.6400000000000002e-05,
      "loss": 6.4319,
      "step": 42
    },
    {
      "epoch": 0.02702702702702703,
      "grad_norm": 5.258227348327637,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 6.7672,
      "step": 43
    },
    {
      "epoch": 0.02765556253928347,
      "grad_norm": 5.063000679016113,
      "learning_rate": 1.7199999999999998e-05,
      "loss": 6.3354,
      "step": 44
    },
    {
      "epoch": 0.028284098051539912,
      "grad_norm": 4.573636531829834,
      "learning_rate": 1.76e-05,
      "loss": 6.3374,
      "step": 45
    },
    {
      "epoch": 0.028912633563796353,
      "grad_norm": 4.72340202331543,
      "learning_rate": 1.8e-05,
      "loss": 6.6553,
      "step": 46
    },
    {
      "epoch": 0.0295411690760528,
      "grad_norm": 6.681248664855957,
      "learning_rate": 1.84e-05,
      "loss": 7.7157,
      "step": 47
    },
    {
      "epoch": 0.03016970458830924,
      "grad_norm": 5.952408313751221,
      "learning_rate": 1.88e-05,
      "loss": 5.8215,
      "step": 48
    },
    {
      "epoch": 0.030798240100565682,
      "grad_norm": 6.599308967590332,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 6.921,
      "step": 49
    },
    {
      "epoch": 0.03142677561282212,
      "grad_norm": 6.538867473602295,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 7.1274,
      "step": 50
    },
    {
      "epoch": 0.03205531112507857,
      "grad_norm": 5.91294527053833,
      "learning_rate": 2e-05,
      "loss": 6.7263,
      "step": 51
    },
    {
      "epoch": 0.03268384663733501,
      "grad_norm": 7.943373203277588,
      "learning_rate": 2.04e-05,
      "loss": 7.4335,
      "step": 52
    },
    {
      "epoch": 0.03331238214959145,
      "grad_norm": 7.023540496826172,
      "learning_rate": 2.08e-05,
      "loss": 6.3428,
      "step": 53
    },
    {
      "epoch": 0.0339409176618479,
      "grad_norm": 7.031850814819336,
      "learning_rate": 2.12e-05,
      "loss": 6.2423,
      "step": 54
    },
    {
      "epoch": 0.034569453174104335,
      "grad_norm": 6.891653537750244,
      "learning_rate": 2.16e-05,
      "loss": 6.4081,
      "step": 55
    },
    {
      "epoch": 0.03519798868636078,
      "grad_norm": 8.165786743164062,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 7.1081,
      "step": 56
    },
    {
      "epoch": 0.035826524198617225,
      "grad_norm": 9.146330833435059,
      "learning_rate": 2.2400000000000002e-05,
      "loss": 7.4309,
      "step": 57
    },
    {
      "epoch": 0.036455059710873663,
      "grad_norm": 8.637526512145996,
      "learning_rate": 2.2800000000000002e-05,
      "loss": 6.9889,
      "step": 58
    },
    {
      "epoch": 0.03708359522313011,
      "grad_norm": 8.397353172302246,
      "learning_rate": 2.32e-05,
      "loss": 5.8222,
      "step": 59
    },
    {
      "epoch": 0.03771213073538655,
      "grad_norm": 9.219857215881348,
      "learning_rate": 2.36e-05,
      "loss": 7.0839,
      "step": 60
    },
    {
      "epoch": 0.03834066624764299,
      "grad_norm": 8.762686729431152,
      "learning_rate": 2.4e-05,
      "loss": 6.192,
      "step": 61
    },
    {
      "epoch": 0.03896920175989944,
      "grad_norm": 9.55370044708252,
      "learning_rate": 2.44e-05,
      "loss": 6.2674,
      "step": 62
    },
    {
      "epoch": 0.039597737272155875,
      "grad_norm": 10.248711585998535,
      "learning_rate": 2.48e-05,
      "loss": 7.4737,
      "step": 63
    },
    {
      "epoch": 0.04022627278441232,
      "grad_norm": 9.455551147460938,
      "learning_rate": 2.5200000000000003e-05,
      "loss": 6.5796,
      "step": 64
    },
    {
      "epoch": 0.04085480829666876,
      "grad_norm": 10.217570304870605,
      "learning_rate": 2.5600000000000002e-05,
      "loss": 6.8329,
      "step": 65
    },
    {
      "epoch": 0.041483343808925204,
      "grad_norm": 12.418697357177734,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 7.0127,
      "step": 66
    },
    {
      "epoch": 0.04211187932118165,
      "grad_norm": 13.362143516540527,
      "learning_rate": 2.64e-05,
      "loss": 6.3027,
      "step": 67
    },
    {
      "epoch": 0.04274041483343809,
      "grad_norm": 10.577826499938965,
      "learning_rate": 2.6800000000000004e-05,
      "loss": 6.2406,
      "step": 68
    },
    {
      "epoch": 0.04336895034569453,
      "grad_norm": 13.15530776977539,
      "learning_rate": 2.7200000000000004e-05,
      "loss": 7.3093,
      "step": 69
    },
    {
      "epoch": 0.04399748585795098,
      "grad_norm": 10.775976181030273,
      "learning_rate": 2.7600000000000003e-05,
      "loss": 5.9398,
      "step": 70
    },
    {
      "epoch": 0.044626021370207415,
      "grad_norm": 12.925591468811035,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 6.7144,
      "step": 71
    },
    {
      "epoch": 0.04525455688246386,
      "grad_norm": 12.626113891601562,
      "learning_rate": 2.84e-05,
      "loss": 6.5783,
      "step": 72
    },
    {
      "epoch": 0.0458830923947203,
      "grad_norm": 14.04005241394043,
      "learning_rate": 2.88e-05,
      "loss": 6.1111,
      "step": 73
    },
    {
      "epoch": 0.046511627906976744,
      "grad_norm": 14.279847145080566,
      "learning_rate": 2.9199999999999998e-05,
      "loss": 5.7717,
      "step": 74
    },
    {
      "epoch": 0.04714016341923319,
      "grad_norm": 13.597288131713867,
      "learning_rate": 2.96e-05,
      "loss": 6.7714,
      "step": 75
    },
    {
      "epoch": 0.04776869893148963,
      "grad_norm": 12.93455696105957,
      "learning_rate": 3e-05,
      "loss": 6.5082,
      "step": 76
    },
    {
      "epoch": 0.04839723444374607,
      "grad_norm": 12.823902130126953,
      "learning_rate": 3.04e-05,
      "loss": 6.7108,
      "step": 77
    },
    {
      "epoch": 0.04902576995600252,
      "grad_norm": 16.93589973449707,
      "learning_rate": 3.08e-05,
      "loss": 6.2773,
      "step": 78
    },
    {
      "epoch": 0.049654305468258955,
      "grad_norm": 14.550610542297363,
      "learning_rate": 3.12e-05,
      "loss": 6.4329,
      "step": 79
    },
    {
      "epoch": 0.0502828409805154,
      "grad_norm": 11.992222785949707,
      "learning_rate": 3.16e-05,
      "loss": 5.3369,
      "step": 80
    },
    {
      "epoch": 0.05091137649277184,
      "grad_norm": 12.921990394592285,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 5.8783,
      "step": 81
    },
    {
      "epoch": 0.051539912005028284,
      "grad_norm": 14.483002662658691,
      "learning_rate": 3.24e-05,
      "loss": 7.4091,
      "step": 82
    },
    {
      "epoch": 0.05216844751728473,
      "grad_norm": 15.877086639404297,
      "learning_rate": 3.2800000000000004e-05,
      "loss": 6.9743,
      "step": 83
    },
    {
      "epoch": 0.05279698302954117,
      "grad_norm": 15.85240650177002,
      "learning_rate": 3.32e-05,
      "loss": 7.0001,
      "step": 84
    },
    {
      "epoch": 0.05342551854179761,
      "grad_norm": 17.0369815826416,
      "learning_rate": 3.3600000000000004e-05,
      "loss": 5.8022,
      "step": 85
    },
    {
      "epoch": 0.05405405405405406,
      "grad_norm": 15.62733268737793,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 6.8172,
      "step": 86
    },
    {
      "epoch": 0.054682589566310495,
      "grad_norm": 18.79668426513672,
      "learning_rate": 3.4399999999999996e-05,
      "loss": 6.6659,
      "step": 87
    },
    {
      "epoch": 0.05531112507856694,
      "grad_norm": 15.78856086730957,
      "learning_rate": 3.48e-05,
      "loss": 5.7255,
      "step": 88
    },
    {
      "epoch": 0.05593966059082338,
      "grad_norm": 13.196313858032227,
      "learning_rate": 3.52e-05,
      "loss": 5.8423,
      "step": 89
    },
    {
      "epoch": 0.056568196103079824,
      "grad_norm": 16.4498233795166,
      "learning_rate": 3.56e-05,
      "loss": 5.4969,
      "step": 90
    },
    {
      "epoch": 0.05719673161533627,
      "grad_norm": 15.243169784545898,
      "learning_rate": 3.6e-05,
      "loss": 6.0186,
      "step": 91
    },
    {
      "epoch": 0.05782526712759271,
      "grad_norm": 17.294044494628906,
      "learning_rate": 3.6400000000000004e-05,
      "loss": 6.8408,
      "step": 92
    },
    {
      "epoch": 0.05845380263984915,
      "grad_norm": 19.32530975341797,
      "learning_rate": 3.68e-05,
      "loss": 6.5562,
      "step": 93
    },
    {
      "epoch": 0.0590823381521056,
      "grad_norm": 20.01514434814453,
      "learning_rate": 3.72e-05,
      "loss": 6.4273,
      "step": 94
    },
    {
      "epoch": 0.059710873664362035,
      "grad_norm": 19.90467643737793,
      "learning_rate": 3.76e-05,
      "loss": 7.2277,
      "step": 95
    },
    {
      "epoch": 0.06033940917661848,
      "grad_norm": 17.582975387573242,
      "learning_rate": 3.8e-05,
      "loss": 7.0173,
      "step": 96
    },
    {
      "epoch": 0.06096794468887492,
      "grad_norm": 17.471969604492188,
      "learning_rate": 3.8400000000000005e-05,
      "loss": 6.6919,
      "step": 97
    },
    {
      "epoch": 0.061596480201131364,
      "grad_norm": 16.06781005859375,
      "learning_rate": 3.88e-05,
      "loss": 6.54,
      "step": 98
    },
    {
      "epoch": 0.06222501571338781,
      "grad_norm": 17.99164390563965,
      "learning_rate": 3.9200000000000004e-05,
      "loss": 5.9168,
      "step": 99
    },
    {
      "epoch": 0.06285355122564425,
      "grad_norm": 16.01885986328125,
      "learning_rate": 3.960000000000001e-05,
      "loss": 5.9236,
      "step": 100
    },
    {
      "epoch": 0.06348208673790069,
      "grad_norm": 9.705466270446777,
      "learning_rate": 4e-05,
      "loss": 4.4168,
      "step": 101
    },
    {
      "epoch": 0.06411062225015714,
      "grad_norm": 21.02065086364746,
      "learning_rate": 4.0400000000000006e-05,
      "loss": 6.7312,
      "step": 102
    },
    {
      "epoch": 0.06473915776241358,
      "grad_norm": 16.56360626220703,
      "learning_rate": 4.08e-05,
      "loss": 6.5865,
      "step": 103
    },
    {
      "epoch": 0.06536769327467001,
      "grad_norm": 17.319807052612305,
      "learning_rate": 4.12e-05,
      "loss": 6.4122,
      "step": 104
    },
    {
      "epoch": 0.06599622878692646,
      "grad_norm": 21.3114070892334,
      "learning_rate": 4.16e-05,
      "loss": 6.9886,
      "step": 105
    },
    {
      "epoch": 0.0666247642991829,
      "grad_norm": 21.147327423095703,
      "learning_rate": 4.2e-05,
      "loss": 6.7595,
      "step": 106
    },
    {
      "epoch": 0.06725329981143935,
      "grad_norm": 16.264938354492188,
      "learning_rate": 4.24e-05,
      "loss": 6.114,
      "step": 107
    },
    {
      "epoch": 0.0678818353236958,
      "grad_norm": 20.424713134765625,
      "learning_rate": 4.2800000000000004e-05,
      "loss": 6.9508,
      "step": 108
    },
    {
      "epoch": 0.06851037083595223,
      "grad_norm": 19.48565673828125,
      "learning_rate": 4.32e-05,
      "loss": 6.7488,
      "step": 109
    },
    {
      "epoch": 0.06913890634820867,
      "grad_norm": 17.111894607543945,
      "learning_rate": 4.36e-05,
      "loss": 6.5613,
      "step": 110
    },
    {
      "epoch": 0.06976744186046512,
      "grad_norm": 14.604440689086914,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 6.0278,
      "step": 111
    },
    {
      "epoch": 0.07039597737272156,
      "grad_norm": 19.691083908081055,
      "learning_rate": 4.44e-05,
      "loss": 6.6524,
      "step": 112
    },
    {
      "epoch": 0.071024512884978,
      "grad_norm": 20.216869354248047,
      "learning_rate": 4.4800000000000005e-05,
      "loss": 7.1919,
      "step": 113
    },
    {
      "epoch": 0.07165304839723445,
      "grad_norm": 12.34801197052002,
      "learning_rate": 4.52e-05,
      "loss": 5.8613,
      "step": 114
    },
    {
      "epoch": 0.07228158390949088,
      "grad_norm": 10.295191764831543,
      "learning_rate": 4.5600000000000004e-05,
      "loss": 5.2475,
      "step": 115
    },
    {
      "epoch": 0.07291011942174733,
      "grad_norm": 15.049263954162598,
      "learning_rate": 4.600000000000001e-05,
      "loss": 6.1295,
      "step": 116
    },
    {
      "epoch": 0.07353865493400377,
      "grad_norm": 12.287694931030273,
      "learning_rate": 4.64e-05,
      "loss": 5.7615,
      "step": 117
    },
    {
      "epoch": 0.07416719044626022,
      "grad_norm": 12.11177921295166,
      "learning_rate": 4.6800000000000006e-05,
      "loss": 5.8763,
      "step": 118
    },
    {
      "epoch": 0.07479572595851666,
      "grad_norm": 11.103803634643555,
      "learning_rate": 4.72e-05,
      "loss": 5.5072,
      "step": 119
    },
    {
      "epoch": 0.0754242614707731,
      "grad_norm": 10.065237998962402,
      "learning_rate": 4.76e-05,
      "loss": 5.4734,
      "step": 120
    },
    {
      "epoch": 0.07605279698302954,
      "grad_norm": 15.039546012878418,
      "learning_rate": 4.8e-05,
      "loss": 6.2005,
      "step": 121
    },
    {
      "epoch": 0.07668133249528598,
      "grad_norm": 10.25068473815918,
      "learning_rate": 4.8400000000000004e-05,
      "loss": 5.4703,
      "step": 122
    },
    {
      "epoch": 0.07730986800754243,
      "grad_norm": 13.106711387634277,
      "learning_rate": 4.88e-05,
      "loss": 6.6926,
      "step": 123
    },
    {
      "epoch": 0.07793840351979887,
      "grad_norm": 7.927108287811279,
      "learning_rate": 4.92e-05,
      "loss": 5.3666,
      "step": 124
    },
    {
      "epoch": 0.0785669390320553,
      "grad_norm": 10.937745094299316,
      "learning_rate": 4.96e-05,
      "loss": 6.1474,
      "step": 125
    },
    {
      "epoch": 0.07919547454431175,
      "grad_norm": 10.88867473602295,
      "learning_rate": 5e-05,
      "loss": 6.1572,
      "step": 126
    },
    {
      "epoch": 0.0798240100565682,
      "grad_norm": 11.629264831542969,
      "learning_rate": 5.0400000000000005e-05,
      "loss": 6.391,
      "step": 127
    },
    {
      "epoch": 0.08045254556882464,
      "grad_norm": 10.296239852905273,
      "learning_rate": 5.08e-05,
      "loss": 5.9652,
      "step": 128
    },
    {
      "epoch": 0.08108108108108109,
      "grad_norm": 11.038286209106445,
      "learning_rate": 5.1200000000000004e-05,
      "loss": 6.1295,
      "step": 129
    },
    {
      "epoch": 0.08170961659333752,
      "grad_norm": 9.234803199768066,
      "learning_rate": 5.16e-05,
      "loss": 5.796,
      "step": 130
    },
    {
      "epoch": 0.08233815210559396,
      "grad_norm": 8.689288139343262,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 5.3704,
      "step": 131
    },
    {
      "epoch": 0.08296668761785041,
      "grad_norm": 15.41921615600586,
      "learning_rate": 5.2400000000000007e-05,
      "loss": 6.0926,
      "step": 132
    },
    {
      "epoch": 0.08359522313010685,
      "grad_norm": 8.419554710388184,
      "learning_rate": 5.28e-05,
      "loss": 5.8071,
      "step": 133
    },
    {
      "epoch": 0.0842237586423633,
      "grad_norm": 8.644979476928711,
      "learning_rate": 5.3200000000000006e-05,
      "loss": 5.9138,
      "step": 134
    },
    {
      "epoch": 0.08485229415461974,
      "grad_norm": 11.236026763916016,
      "learning_rate": 5.360000000000001e-05,
      "loss": 5.9092,
      "step": 135
    },
    {
      "epoch": 0.08548082966687617,
      "grad_norm": 9.669437408447266,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 5.3778,
      "step": 136
    },
    {
      "epoch": 0.08610936517913262,
      "grad_norm": 10.624286651611328,
      "learning_rate": 5.440000000000001e-05,
      "loss": 5.4098,
      "step": 137
    },
    {
      "epoch": 0.08673790069138906,
      "grad_norm": 8.831917762756348,
      "learning_rate": 5.4800000000000004e-05,
      "loss": 5.3806,
      "step": 138
    },
    {
      "epoch": 0.08736643620364551,
      "grad_norm": 8.536581993103027,
      "learning_rate": 5.520000000000001e-05,
      "loss": 5.9237,
      "step": 139
    },
    {
      "epoch": 0.08799497171590195,
      "grad_norm": 7.873721599578857,
      "learning_rate": 5.560000000000001e-05,
      "loss": 5.423,
      "step": 140
    },
    {
      "epoch": 0.08862350722815839,
      "grad_norm": 8.635188102722168,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 5.6118,
      "step": 141
    },
    {
      "epoch": 0.08925204274041483,
      "grad_norm": 13.39542293548584,
      "learning_rate": 5.6399999999999995e-05,
      "loss": 5.2728,
      "step": 142
    },
    {
      "epoch": 0.08988057825267128,
      "grad_norm": 12.998862266540527,
      "learning_rate": 5.68e-05,
      "loss": 6.9434,
      "step": 143
    },
    {
      "epoch": 0.09050911376492772,
      "grad_norm": 8.149964332580566,
      "learning_rate": 5.72e-05,
      "loss": 5.3559,
      "step": 144
    },
    {
      "epoch": 0.09113764927718417,
      "grad_norm": 7.66946268081665,
      "learning_rate": 5.76e-05,
      "loss": 5.5603,
      "step": 145
    },
    {
      "epoch": 0.0917661847894406,
      "grad_norm": 9.277702331542969,
      "learning_rate": 5.8e-05,
      "loss": 4.9751,
      "step": 146
    },
    {
      "epoch": 0.09239472030169704,
      "grad_norm": 7.442458152770996,
      "learning_rate": 5.8399999999999997e-05,
      "loss": 5.1653,
      "step": 147
    },
    {
      "epoch": 0.09302325581395349,
      "grad_norm": 8.340120315551758,
      "learning_rate": 5.88e-05,
      "loss": 5.5422,
      "step": 148
    },
    {
      "epoch": 0.09365179132620993,
      "grad_norm": 6.774919033050537,
      "learning_rate": 5.92e-05,
      "loss": 5.0187,
      "step": 149
    },
    {
      "epoch": 0.09428032683846638,
      "grad_norm": 8.363201141357422,
      "learning_rate": 5.96e-05,
      "loss": 4.9803,
      "step": 150
    },
    {
      "epoch": 0.09490886235072282,
      "grad_norm": 7.182234764099121,
      "learning_rate": 6e-05,
      "loss": 5.2913,
      "step": 151
    },
    {
      "epoch": 0.09553739786297925,
      "grad_norm": 9.065616607666016,
      "learning_rate": 6.04e-05,
      "loss": 5.8604,
      "step": 152
    },
    {
      "epoch": 0.0961659333752357,
      "grad_norm": 7.823053359985352,
      "learning_rate": 6.08e-05,
      "loss": 5.7326,
      "step": 153
    },
    {
      "epoch": 0.09679446888749214,
      "grad_norm": 8.177785873413086,
      "learning_rate": 6.12e-05,
      "loss": 5.5663,
      "step": 154
    },
    {
      "epoch": 0.09742300439974859,
      "grad_norm": 8.24718952178955,
      "learning_rate": 6.16e-05,
      "loss": 5.5606,
      "step": 155
    },
    {
      "epoch": 0.09805153991200503,
      "grad_norm": 10.260727882385254,
      "learning_rate": 6.2e-05,
      "loss": 5.7043,
      "step": 156
    },
    {
      "epoch": 0.09868007542426147,
      "grad_norm": 11.80507755279541,
      "learning_rate": 6.24e-05,
      "loss": 6.0268,
      "step": 157
    },
    {
      "epoch": 0.09930861093651791,
      "grad_norm": 9.024588584899902,
      "learning_rate": 6.280000000000001e-05,
      "loss": 6.2249,
      "step": 158
    },
    {
      "epoch": 0.09993714644877436,
      "grad_norm": 6.814016342163086,
      "learning_rate": 6.32e-05,
      "loss": 5.0123,
      "step": 159
    },
    {
      "epoch": 0.1005656819610308,
      "grad_norm": 6.701868534088135,
      "learning_rate": 6.36e-05,
      "loss": 4.9993,
      "step": 160
    },
    {
      "epoch": 0.1005656819610308,
      "eval_loss": 5.605074882507324,
      "eval_runtime": 1565.1736,
      "eval_samples_per_second": 1.647,
      "eval_steps_per_second": 1.647,
      "step": 160
    }
  ],
  "logging_steps": 1,
  "max_steps": 1591,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 160,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.0460495771690598e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}
|
|