{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8045254556882464,
  "eval_steps": 160,
  "global_step": 1280,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006285355122564425,
      "grad_norm": 2.96633243560791,
      "learning_rate": 0.0,
      "loss": 7.5468,
      "step": 1
    },
    {
      "epoch": 0.0006285355122564425,
      "eval_loss": 7.076071739196777,
      "eval_runtime": 1556.6109,
      "eval_samples_per_second": 1.656,
      "eval_steps_per_second": 1.656,
      "step": 1
    },
    {
      "epoch": 0.001257071024512885,
      "grad_norm": 2.7123501300811768,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 6.3309,
      "step": 2
    },
    {
      "epoch": 0.0018856065367693275,
      "grad_norm": 2.4243223667144775,
      "learning_rate": 8.000000000000001e-07,
      "loss": 6.7763,
      "step": 3
    },
    {
      "epoch": 0.00251414204902577,
      "grad_norm": 2.319216728210449,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 6.8187,
      "step": 4
    },
    {
      "epoch": 0.0031426775612822125,
      "grad_norm": 2.4935989379882812,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 7.0526,
      "step": 5
    },
    {
      "epoch": 0.003771213073538655,
      "grad_norm": 2.1718926429748535,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 6.7646,
      "step": 6
    },
    {
      "epoch": 0.0043997485857950975,
      "grad_norm": 2.177217960357666,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 6.6131,
      "step": 7
    },
    {
      "epoch": 0.00502828409805154,
      "grad_norm": 2.9915874004364014,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 7.6544,
      "step": 8
    },
    {
      "epoch": 0.0056568196103079825,
      "grad_norm": 2.6981074810028076,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 6.799,
      "step": 9
    },
    {
      "epoch": 0.006285355122564425,
      "grad_norm": 4.512426376342773,
      "learning_rate": 3.6e-06,
      "loss": 8.3895,
      "step": 10
    },
    {
      "epoch": 0.0069138906348208675,
      "grad_norm": 2.3968913555145264,
      "learning_rate": 4.000000000000001e-06,
      "loss": 6.3478,
      "step": 11
    },
    {
      "epoch": 0.00754242614707731,
      "grad_norm": 1.9744027853012085,
      "learning_rate": 4.4e-06,
      "loss": 5.5536,
      "step": 12
    },
    {
      "epoch": 0.008170961659333752,
      "grad_norm": 3.050507068634033,
      "learning_rate": 4.800000000000001e-06,
      "loss": 6.5491,
      "step": 13
    },
    {
      "epoch": 0.008799497171590195,
      "grad_norm": 3.0034799575805664,
      "learning_rate": 5.2e-06,
      "loss": 7.2764,
      "step": 14
    },
    {
      "epoch": 0.009428032683846637,
      "grad_norm": 2.323613405227661,
      "learning_rate": 5.600000000000001e-06,
      "loss": 6.8805,
      "step": 15
    },
    {
      "epoch": 0.01005656819610308,
      "grad_norm": 3.129593849182129,
      "learning_rate": 6e-06,
      "loss": 7.3171,
      "step": 16
    },
    {
      "epoch": 0.010685103708359522,
      "grad_norm": 2.296137571334839,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 6.4251,
      "step": 17
    },
    {
      "epoch": 0.011313639220615965,
      "grad_norm": 2.637282133102417,
      "learning_rate": 6.800000000000001e-06,
      "loss": 6.6142,
      "step": 18
    },
    {
      "epoch": 0.011942174732872407,
      "grad_norm": 2.1313271522521973,
      "learning_rate": 7.2e-06,
      "loss": 6.3841,
      "step": 19
    },
    {
      "epoch": 0.01257071024512885,
      "grad_norm": 2.7492284774780273,
      "learning_rate": 7.6e-06,
      "loss": 6.8148,
      "step": 20
    },
    {
      "epoch": 0.013199245757385292,
      "grad_norm": 2.945878267288208,
      "learning_rate": 8.000000000000001e-06,
      "loss": 6.9732,
      "step": 21
    },
    {
      "epoch": 0.013827781269641735,
      "grad_norm": 3.709951162338257,
      "learning_rate": 8.400000000000001e-06,
      "loss": 7.7273,
      "step": 22
    },
    {
      "epoch": 0.014456316781898177,
      "grad_norm": 3.023289203643799,
      "learning_rate": 8.8e-06,
      "loss": 7.0649,
      "step": 23
    },
    {
      "epoch": 0.01508485229415462,
      "grad_norm": 3.163715124130249,
      "learning_rate": 9.2e-06,
      "loss": 6.9342,
      "step": 24
    },
    {
      "epoch": 0.01571338780641106,
      "grad_norm": 4.114445686340332,
      "learning_rate": 9.600000000000001e-06,
      "loss": 8.119,
      "step": 25
    },
    {
      "epoch": 0.016341923318667503,
      "grad_norm": 3.021068572998047,
      "learning_rate": 1e-05,
      "loss": 7.4067,
      "step": 26
    },
    {
      "epoch": 0.01697045883092395,
      "grad_norm": 3.724407911300659,
      "learning_rate": 1.04e-05,
      "loss": 7.3869,
      "step": 27
    },
    {
      "epoch": 0.01759899434318039,
      "grad_norm": 2.656257390975952,
      "learning_rate": 1.08e-05,
      "loss": 5.9961,
      "step": 28
    },
    {
      "epoch": 0.018227529855436832,
      "grad_norm": 2.7785143852233887,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 5.6596,
      "step": 29
    },
    {
      "epoch": 0.018856065367693273,
      "grad_norm": 3.130934715270996,
      "learning_rate": 1.16e-05,
      "loss": 6.6092,
      "step": 30
    },
    {
      "epoch": 0.01948460087994972,
      "grad_norm": 3.42301869392395,
      "learning_rate": 1.2e-05,
      "loss": 6.3373,
      "step": 31
    },
    {
      "epoch": 0.02011313639220616,
      "grad_norm": 2.8691611289978027,
      "learning_rate": 1.24e-05,
      "loss": 6.5923,
      "step": 32
    },
    {
      "epoch": 0.020741671904462602,
      "grad_norm": 2.917086601257324,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 5.9773,
      "step": 33
    },
    {
      "epoch": 0.021370207416719043,
      "grad_norm": 4.07196044921875,
      "learning_rate": 1.32e-05,
      "loss": 7.4423,
      "step": 34
    },
    {
      "epoch": 0.02199874292897549,
      "grad_norm": 4.738312244415283,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 7.5113,
      "step": 35
    },
    {
      "epoch": 0.02262727844123193,
      "grad_norm": 3.898664712905884,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 6.8738,
      "step": 36
    },
    {
      "epoch": 0.023255813953488372,
      "grad_norm": 3.7448792457580566,
      "learning_rate": 1.44e-05,
      "loss": 6.9615,
      "step": 37
    },
    {
      "epoch": 0.023884349465744813,
      "grad_norm": 3.5938379764556885,
      "learning_rate": 1.48e-05,
      "loss": 6.0651,
      "step": 38
    },
    {
      "epoch": 0.02451288497800126,
      "grad_norm": 4.253636360168457,
      "learning_rate": 1.52e-05,
      "loss": 6.9986,
      "step": 39
    },
    {
      "epoch": 0.0251414204902577,
      "grad_norm": 4.985451698303223,
      "learning_rate": 1.56e-05,
      "loss": 7.252,
      "step": 40
    },
    {
      "epoch": 0.025769956002514142,
      "grad_norm": 4.376275062561035,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 6.8091,
      "step": 41
    },
    {
      "epoch": 0.026398491514770583,
      "grad_norm": 4.697645664215088,
      "learning_rate": 1.6400000000000002e-05,
      "loss": 6.4319,
      "step": 42
    },
    {
      "epoch": 0.02702702702702703,
      "grad_norm": 5.258227348327637,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 6.7672,
      "step": 43
    },
    {
      "epoch": 0.02765556253928347,
      "grad_norm": 5.063000679016113,
      "learning_rate": 1.7199999999999998e-05,
      "loss": 6.3354,
      "step": 44
    },
    {
      "epoch": 0.028284098051539912,
      "grad_norm": 4.573636531829834,
      "learning_rate": 1.76e-05,
      "loss": 6.3374,
      "step": 45
    },
    {
      "epoch": 0.028912633563796353,
      "grad_norm": 4.72340202331543,
      "learning_rate": 1.8e-05,
      "loss": 6.6553,
      "step": 46
    },
    {
      "epoch": 0.0295411690760528,
      "grad_norm": 6.681248664855957,
      "learning_rate": 1.84e-05,
      "loss": 7.7157,
      "step": 47
    },
    {
      "epoch": 0.03016970458830924,
      "grad_norm": 5.952408313751221,
      "learning_rate": 1.88e-05,
      "loss": 5.8215,
      "step": 48
    },
    {
      "epoch": 0.030798240100565682,
      "grad_norm": 6.599308967590332,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 6.921,
      "step": 49
    },
    {
      "epoch": 0.03142677561282212,
      "grad_norm": 6.538867473602295,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 7.1274,
      "step": 50
    },
    {
      "epoch": 0.03205531112507857,
      "grad_norm": 5.91294527053833,
      "learning_rate": 2e-05,
      "loss": 6.7263,
      "step": 51
    },
    {
      "epoch": 0.03268384663733501,
      "grad_norm": 7.943373203277588,
      "learning_rate": 2.04e-05,
      "loss": 7.4335,
      "step": 52
    },
    {
      "epoch": 0.03331238214959145,
      "grad_norm": 7.023540496826172,
      "learning_rate": 2.08e-05,
      "loss": 6.3428,
      "step": 53
    },
    {
      "epoch": 0.0339409176618479,
      "grad_norm": 7.031850814819336,
      "learning_rate": 2.12e-05,
      "loss": 6.2423,
      "step": 54
    },
    {
      "epoch": 0.034569453174104335,
      "grad_norm": 6.891653537750244,
      "learning_rate": 2.16e-05,
      "loss": 6.4081,
      "step": 55
    },
    {
      "epoch": 0.03519798868636078,
      "grad_norm": 8.165786743164062,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 7.1081,
      "step": 56
    },
    {
      "epoch": 0.035826524198617225,
      "grad_norm": 9.146330833435059,
      "learning_rate": 2.2400000000000002e-05,
      "loss": 7.4309,
      "step": 57
    },
    {
      "epoch": 0.036455059710873663,
      "grad_norm": 8.637526512145996,
      "learning_rate": 2.2800000000000002e-05,
      "loss": 6.9889,
      "step": 58
    },
    {
      "epoch": 0.03708359522313011,
      "grad_norm": 8.397353172302246,
      "learning_rate": 2.32e-05,
      "loss": 5.8222,
      "step": 59
    },
    {
      "epoch": 0.03771213073538655,
      "grad_norm": 9.219857215881348,
      "learning_rate": 2.36e-05,
      "loss": 7.0839,
      "step": 60
    },
    {
      "epoch": 0.03834066624764299,
      "grad_norm": 8.762686729431152,
      "learning_rate": 2.4e-05,
      "loss": 6.192,
      "step": 61
    },
    {
      "epoch": 0.03896920175989944,
      "grad_norm": 9.55370044708252,
      "learning_rate": 2.44e-05,
      "loss": 6.2674,
      "step": 62
    },
    {
      "epoch": 0.039597737272155875,
      "grad_norm": 10.248711585998535,
      "learning_rate": 2.48e-05,
      "loss": 7.4737,
      "step": 63
    },
    {
      "epoch": 0.04022627278441232,
      "grad_norm": 9.455551147460938,
      "learning_rate": 2.5200000000000003e-05,
      "loss": 6.5796,
      "step": 64
    },
    {
      "epoch": 0.04085480829666876,
      "grad_norm": 10.217570304870605,
      "learning_rate": 2.5600000000000002e-05,
      "loss": 6.8329,
      "step": 65
    },
    {
      "epoch": 0.041483343808925204,
      "grad_norm": 12.418697357177734,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 7.0127,
      "step": 66
    },
    {
      "epoch": 0.04211187932118165,
      "grad_norm": 13.362143516540527,
      "learning_rate": 2.64e-05,
      "loss": 6.3027,
      "step": 67
    },
    {
      "epoch": 0.04274041483343809,
      "grad_norm": 10.577826499938965,
      "learning_rate": 2.6800000000000004e-05,
      "loss": 6.2406,
      "step": 68
    },
    {
      "epoch": 0.04336895034569453,
      "grad_norm": 13.15530776977539,
      "learning_rate": 2.7200000000000004e-05,
      "loss": 7.3093,
      "step": 69
    },
    {
      "epoch": 0.04399748585795098,
      "grad_norm": 10.775976181030273,
      "learning_rate": 2.7600000000000003e-05,
      "loss": 5.9398,
      "step": 70
    },
    {
      "epoch": 0.044626021370207415,
      "grad_norm": 12.925591468811035,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 6.7144,
      "step": 71
    },
    {
      "epoch": 0.04525455688246386,
      "grad_norm": 12.626113891601562,
      "learning_rate": 2.84e-05,
      "loss": 6.5783,
      "step": 72
    },
    {
      "epoch": 0.0458830923947203,
      "grad_norm": 14.04005241394043,
      "learning_rate": 2.88e-05,
      "loss": 6.1111,
      "step": 73
    },
    {
      "epoch": 0.046511627906976744,
      "grad_norm": 14.279847145080566,
      "learning_rate": 2.9199999999999998e-05,
      "loss": 5.7717,
      "step": 74
    },
    {
      "epoch": 0.04714016341923319,
      "grad_norm": 13.597288131713867,
      "learning_rate": 2.96e-05,
      "loss": 6.7714,
      "step": 75
    },
    {
      "epoch": 0.04776869893148963,
      "grad_norm": 12.93455696105957,
      "learning_rate": 3e-05,
      "loss": 6.5082,
      "step": 76
    },
    {
      "epoch": 0.04839723444374607,
      "grad_norm": 12.823902130126953,
      "learning_rate": 3.04e-05,
      "loss": 6.7108,
      "step": 77
    },
    {
      "epoch": 0.04902576995600252,
      "grad_norm": 16.93589973449707,
      "learning_rate": 3.08e-05,
      "loss": 6.2773,
      "step": 78
    },
    {
      "epoch": 0.049654305468258955,
      "grad_norm": 14.550610542297363,
      "learning_rate": 3.12e-05,
      "loss": 6.4329,
      "step": 79
    },
    {
      "epoch": 0.0502828409805154,
      "grad_norm": 11.992222785949707,
      "learning_rate": 3.16e-05,
      "loss": 5.3369,
      "step": 80
    },
    {
      "epoch": 0.05091137649277184,
      "grad_norm": 12.921990394592285,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 5.8783,
      "step": 81
    },
    {
      "epoch": 0.051539912005028284,
      "grad_norm": 14.483002662658691,
      "learning_rate": 3.24e-05,
      "loss": 7.4091,
      "step": 82
    },
    {
      "epoch": 0.05216844751728473,
      "grad_norm": 15.877086639404297,
      "learning_rate": 3.2800000000000004e-05,
      "loss": 6.9743,
      "step": 83
    },
    {
      "epoch": 0.05279698302954117,
      "grad_norm": 15.85240650177002,
      "learning_rate": 3.32e-05,
      "loss": 7.0001,
      "step": 84
    },
    {
      "epoch": 0.05342551854179761,
      "grad_norm": 17.0369815826416,
      "learning_rate": 3.3600000000000004e-05,
      "loss": 5.8022,
      "step": 85
    },
    {
      "epoch": 0.05405405405405406,
      "grad_norm": 15.62733268737793,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 6.8172,
      "step": 86
    },
    {
      "epoch": 0.054682589566310495,
      "grad_norm": 18.79668426513672,
      "learning_rate": 3.4399999999999996e-05,
      "loss": 6.6659,
      "step": 87
    },
    {
      "epoch": 0.05531112507856694,
      "grad_norm": 15.78856086730957,
      "learning_rate": 3.48e-05,
      "loss": 5.7255,
      "step": 88
    },
    {
      "epoch": 0.05593966059082338,
      "grad_norm": 13.196313858032227,
      "learning_rate": 3.52e-05,
      "loss": 5.8423,
      "step": 89
    },
    {
      "epoch": 0.056568196103079824,
      "grad_norm": 16.4498233795166,
      "learning_rate": 3.56e-05,
      "loss": 5.4969,
      "step": 90
    },
    {
      "epoch": 0.05719673161533627,
      "grad_norm": 15.243169784545898,
      "learning_rate": 3.6e-05,
      "loss": 6.0186,
      "step": 91
    },
    {
      "epoch": 0.05782526712759271,
      "grad_norm": 17.294044494628906,
      "learning_rate": 3.6400000000000004e-05,
      "loss": 6.8408,
      "step": 92
    },
    {
      "epoch": 0.05845380263984915,
      "grad_norm": 19.32530975341797,
      "learning_rate": 3.68e-05,
      "loss": 6.5562,
      "step": 93
    },
    {
      "epoch": 0.0590823381521056,
      "grad_norm": 20.01514434814453,
      "learning_rate": 3.72e-05,
      "loss": 6.4273,
      "step": 94
    },
    {
      "epoch": 0.059710873664362035,
      "grad_norm": 19.90467643737793,
      "learning_rate": 3.76e-05,
      "loss": 7.2277,
      "step": 95
    },
    {
      "epoch": 0.06033940917661848,
      "grad_norm": 17.582975387573242,
      "learning_rate": 3.8e-05,
      "loss": 7.0173,
      "step": 96
    },
    {
      "epoch": 0.06096794468887492,
      "grad_norm": 17.471969604492188,
      "learning_rate": 3.8400000000000005e-05,
      "loss": 6.6919,
      "step": 97
    },
    {
      "epoch": 0.061596480201131364,
      "grad_norm": 16.06781005859375,
      "learning_rate": 3.88e-05,
      "loss": 6.54,
      "step": 98
    },
    {
      "epoch": 0.06222501571338781,
      "grad_norm": 17.99164390563965,
      "learning_rate": 3.9200000000000004e-05,
      "loss": 5.9168,
      "step": 99
    },
    {
      "epoch": 0.06285355122564425,
      "grad_norm": 16.01885986328125,
      "learning_rate": 3.960000000000001e-05,
      "loss": 5.9236,
      "step": 100
    },
    {
      "epoch": 0.06348208673790069,
      "grad_norm": 9.705466270446777,
      "learning_rate": 4e-05,
      "loss": 4.4168,
      "step": 101
    },
    {
      "epoch": 0.06411062225015714,
      "grad_norm": 21.02065086364746,
      "learning_rate": 4.0400000000000006e-05,
      "loss": 6.7312,
      "step": 102
    },
    {
      "epoch": 0.06473915776241358,
      "grad_norm": 16.56360626220703,
      "learning_rate": 4.08e-05,
      "loss": 6.5865,
      "step": 103
    },
    {
      "epoch": 0.06536769327467001,
      "grad_norm": 17.319807052612305,
      "learning_rate": 4.12e-05,
      "loss": 6.4122,
      "step": 104
    },
    {
      "epoch": 0.06599622878692646,
      "grad_norm": 21.3114070892334,
      "learning_rate": 4.16e-05,
      "loss": 6.9886,
      "step": 105
    },
    {
      "epoch": 0.0666247642991829,
      "grad_norm": 21.147327423095703,
      "learning_rate": 4.2e-05,
      "loss": 6.7595,
      "step": 106
    },
    {
      "epoch": 0.06725329981143935,
      "grad_norm": 16.264938354492188,
      "learning_rate": 4.24e-05,
      "loss": 6.114,
      "step": 107
    },
    {
      "epoch": 0.0678818353236958,
      "grad_norm": 20.424713134765625,
      "learning_rate": 4.2800000000000004e-05,
      "loss": 6.9508,
      "step": 108
    },
    {
      "epoch": 0.06851037083595223,
      "grad_norm": 19.48565673828125,
      "learning_rate": 4.32e-05,
      "loss": 6.7488,
      "step": 109
    },
    {
      "epoch": 0.06913890634820867,
      "grad_norm": 17.111894607543945,
      "learning_rate": 4.36e-05,
      "loss": 6.5613,
      "step": 110
    },
    {
      "epoch": 0.06976744186046512,
      "grad_norm": 14.604440689086914,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 6.0278,
      "step": 111
    },
    {
      "epoch": 0.07039597737272156,
      "grad_norm": 19.691083908081055,
      "learning_rate": 4.44e-05,
      "loss": 6.6524,
      "step": 112
    },
    {
      "epoch": 0.071024512884978,
      "grad_norm": 20.216869354248047,
      "learning_rate": 4.4800000000000005e-05,
      "loss": 7.1919,
      "step": 113
    },
    {
      "epoch": 0.07165304839723445,
      "grad_norm": 12.34801197052002,
      "learning_rate": 4.52e-05,
      "loss": 5.8613,
      "step": 114
    },
    {
      "epoch": 0.07228158390949088,
      "grad_norm": 10.295191764831543,
      "learning_rate": 4.5600000000000004e-05,
      "loss": 5.2475,
      "step": 115
    },
    {
      "epoch": 0.07291011942174733,
      "grad_norm": 15.049263954162598,
      "learning_rate": 4.600000000000001e-05,
      "loss": 6.1295,
      "step": 116
    },
    {
      "epoch": 0.07353865493400377,
      "grad_norm": 12.287694931030273,
      "learning_rate": 4.64e-05,
      "loss": 5.7615,
      "step": 117
    },
    {
      "epoch": 0.07416719044626022,
      "grad_norm": 12.11177921295166,
      "learning_rate": 4.6800000000000006e-05,
      "loss": 5.8763,
      "step": 118
    },
    {
      "epoch": 0.07479572595851666,
      "grad_norm": 11.103803634643555,
      "learning_rate": 4.72e-05,
      "loss": 5.5072,
      "step": 119
    },
    {
      "epoch": 0.0754242614707731,
      "grad_norm": 10.065237998962402,
      "learning_rate": 4.76e-05,
      "loss": 5.4734,
      "step": 120
    },
    {
      "epoch": 0.07605279698302954,
      "grad_norm": 15.039546012878418,
      "learning_rate": 4.8e-05,
      "loss": 6.2005,
      "step": 121
    },
    {
      "epoch": 0.07668133249528598,
      "grad_norm": 10.25068473815918,
      "learning_rate": 4.8400000000000004e-05,
      "loss": 5.4703,
      "step": 122
    },
    {
      "epoch": 0.07730986800754243,
      "grad_norm": 13.106711387634277,
      "learning_rate": 4.88e-05,
      "loss": 6.6926,
      "step": 123
    },
    {
      "epoch": 0.07793840351979887,
      "grad_norm": 7.927108287811279,
      "learning_rate": 4.92e-05,
      "loss": 5.3666,
      "step": 124
    },
    {
      "epoch": 0.0785669390320553,
      "grad_norm": 10.937745094299316,
      "learning_rate": 4.96e-05,
      "loss": 6.1474,
      "step": 125
    },
    {
      "epoch": 0.07919547454431175,
      "grad_norm": 10.88867473602295,
      "learning_rate": 5e-05,
      "loss": 6.1572,
      "step": 126
    },
    {
      "epoch": 0.0798240100565682,
      "grad_norm": 11.629264831542969,
      "learning_rate": 5.0400000000000005e-05,
      "loss": 6.391,
      "step": 127
    },
    {
      "epoch": 0.08045254556882464,
      "grad_norm": 10.296239852905273,
      "learning_rate": 5.08e-05,
      "loss": 5.9652,
      "step": 128
    },
    {
      "epoch": 0.08108108108108109,
      "grad_norm": 11.038286209106445,
      "learning_rate": 5.1200000000000004e-05,
      "loss": 6.1295,
      "step": 129
    },
    {
      "epoch": 0.08170961659333752,
      "grad_norm": 9.234803199768066,
      "learning_rate": 5.16e-05,
      "loss": 5.796,
      "step": 130
    },
    {
      "epoch": 0.08233815210559396,
      "grad_norm": 8.689288139343262,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 5.3704,
      "step": 131
    },
    {
      "epoch": 0.08296668761785041,
      "grad_norm": 15.41921615600586,
      "learning_rate": 5.2400000000000007e-05,
      "loss": 6.0926,
      "step": 132
    },
    {
      "epoch": 0.08359522313010685,
      "grad_norm": 8.419554710388184,
      "learning_rate": 5.28e-05,
      "loss": 5.8071,
      "step": 133
    },
    {
      "epoch": 0.0842237586423633,
      "grad_norm": 8.644979476928711,
      "learning_rate": 5.3200000000000006e-05,
      "loss": 5.9138,
      "step": 134
    },
    {
      "epoch": 0.08485229415461974,
      "grad_norm": 11.236026763916016,
      "learning_rate": 5.360000000000001e-05,
      "loss": 5.9092,
      "step": 135
    },
    {
      "epoch": 0.08548082966687617,
      "grad_norm": 9.669437408447266,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 5.3778,
      "step": 136
    },
    {
      "epoch": 0.08610936517913262,
      "grad_norm": 10.624286651611328,
      "learning_rate": 5.440000000000001e-05,
      "loss": 5.4098,
      "step": 137
    },
    {
      "epoch": 0.08673790069138906,
      "grad_norm": 8.831917762756348,
      "learning_rate": 5.4800000000000004e-05,
      "loss": 5.3806,
      "step": 138
    },
    {
      "epoch": 0.08736643620364551,
      "grad_norm": 8.536581993103027,
      "learning_rate": 5.520000000000001e-05,
      "loss": 5.9237,
      "step": 139
    },
    {
      "epoch": 0.08799497171590195,
      "grad_norm": 7.873721599578857,
      "learning_rate": 5.560000000000001e-05,
      "loss": 5.423,
      "step": 140
    },
    {
      "epoch": 0.08862350722815839,
      "grad_norm": 8.635188102722168,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 5.6118,
      "step": 141
    },
    {
      "epoch": 0.08925204274041483,
      "grad_norm": 13.39542293548584,
      "learning_rate": 5.6399999999999995e-05,
      "loss": 5.2728,
      "step": 142
    },
    {
      "epoch": 0.08988057825267128,
      "grad_norm": 12.998862266540527,
      "learning_rate": 5.68e-05,
      "loss": 6.9434,
      "step": 143
    },
    {
      "epoch": 0.09050911376492772,
      "grad_norm": 8.149964332580566,
      "learning_rate": 5.72e-05,
      "loss": 5.3559,
      "step": 144
    },
    {
      "epoch": 0.09113764927718417,
      "grad_norm": 7.66946268081665,
      "learning_rate": 5.76e-05,
      "loss": 5.5603,
      "step": 145
    },
    {
      "epoch": 0.0917661847894406,
      "grad_norm": 9.277702331542969,
      "learning_rate": 5.8e-05,
      "loss": 4.9751,
      "step": 146
    },
    {
      "epoch": 0.09239472030169704,
      "grad_norm": 7.442458152770996,
      "learning_rate": 5.8399999999999997e-05,
      "loss": 5.1653,
      "step": 147
    },
    {
      "epoch": 0.09302325581395349,
      "grad_norm": 8.340120315551758,
      "learning_rate": 5.88e-05,
      "loss": 5.5422,
      "step": 148
    },
    {
      "epoch": 0.09365179132620993,
      "grad_norm": 6.774919033050537,
      "learning_rate": 5.92e-05,
      "loss": 5.0187,
      "step": 149
    },
    {
      "epoch": 0.09428032683846638,
      "grad_norm": 8.363201141357422,
      "learning_rate": 5.96e-05,
      "loss": 4.9803,
      "step": 150
    },
    {
      "epoch": 0.09490886235072282,
      "grad_norm": 7.182234764099121,
      "learning_rate": 6e-05,
      "loss": 5.2913,
      "step": 151
    },
    {
      "epoch": 0.09553739786297925,
      "grad_norm": 9.065616607666016,
      "learning_rate": 6.04e-05,
      "loss": 5.8604,
      "step": 152
    },
    {
      "epoch": 0.0961659333752357,
      "grad_norm": 7.823053359985352,
      "learning_rate": 6.08e-05,
      "loss": 5.7326,
      "step": 153
    },
    {
      "epoch": 0.09679446888749214,
      "grad_norm": 8.177785873413086,
      "learning_rate": 6.12e-05,
      "loss": 5.5663,
      "step": 154
    },
    {
      "epoch": 0.09742300439974859,
      "grad_norm": 8.24718952178955,
      "learning_rate": 6.16e-05,
      "loss": 5.5606,
      "step": 155
    },
    {
      "epoch": 0.09805153991200503,
      "grad_norm": 10.260727882385254,
      "learning_rate": 6.2e-05,
      "loss": 5.7043,
      "step": 156
    },
    {
      "epoch": 0.09868007542426147,
      "grad_norm": 11.80507755279541,
      "learning_rate": 6.24e-05,
      "loss": 6.0268,
      "step": 157
    },
    {
      "epoch": 0.09930861093651791,
      "grad_norm": 9.024588584899902,
      "learning_rate": 6.280000000000001e-05,
      "loss": 6.2249,
      "step": 158
    },
    {
      "epoch": 0.09993714644877436,
      "grad_norm": 6.814016342163086,
      "learning_rate": 6.32e-05,
      "loss": 5.0123,
      "step": 159
    },
    {
      "epoch": 0.1005656819610308,
      "grad_norm": 6.701868534088135,
      "learning_rate": 6.36e-05,
      "loss": 4.9993,
      "step": 160
    },
    {
      "epoch": 0.1005656819610308,
      "eval_loss": 5.605074882507324,
      "eval_runtime": 1565.1736,
      "eval_samples_per_second": 1.647,
      "eval_steps_per_second": 1.647,
      "step": 160
    },
    {
      "epoch": 0.10119421747328725,
      "grad_norm": 14.352143287658691,
      "learning_rate": 6.400000000000001e-05,
      "loss": 5.8635,
      "step": 161
    },
    {
      "epoch": 0.10182275298554368,
      "grad_norm": 7.681995868682861,
      "learning_rate": 6.440000000000001e-05,
      "loss": 5.2773,
      "step": 162
    },
    {
      "epoch": 0.10245128849780012,
      "grad_norm": 10.032212257385254,
      "learning_rate": 6.48e-05,
      "loss": 5.4718,
      "step": 163
    },
    {
      "epoch": 0.10307982401005657,
      "grad_norm": 6.617798805236816,
      "learning_rate": 6.52e-05,
      "loss": 4.7259,
      "step": 164
    },
    {
      "epoch": 0.10370835952231301,
      "grad_norm": 7.318296432495117,
      "learning_rate": 6.560000000000001e-05,
      "loss": 4.9979,
      "step": 165
    },
    {
      "epoch": 0.10433689503456946,
      "grad_norm": 9.494800567626953,
      "learning_rate": 6.6e-05,
      "loss": 5.4222,
      "step": 166
    },
    {
      "epoch": 0.1049654305468259,
      "grad_norm": 6.2663798332214355,
      "learning_rate": 6.64e-05,
      "loss": 4.806,
      "step": 167
    },
    {
      "epoch": 0.10559396605908233,
      "grad_norm": 7.666187286376953,
      "learning_rate": 6.680000000000001e-05,
      "loss": 5.583,
      "step": 168
    },
    {
      "epoch": 0.10622250157133878,
      "grad_norm": 6.545588493347168,
      "learning_rate": 6.720000000000001e-05,
      "loss": 5.4927,
      "step": 169
    },
    {
      "epoch": 0.10685103708359522,
      "grad_norm": 7.650016784667969,
      "learning_rate": 6.76e-05,
      "loss": 5.6347,
      "step": 170
    },
    {
      "epoch": 0.10747957259585167,
      "grad_norm": 6.05757474899292,
      "learning_rate": 6.800000000000001e-05,
      "loss": 4.8323,
      "step": 171
    },
    {
      "epoch": 0.10810810810810811,
      "grad_norm": 8.37306022644043,
      "learning_rate": 6.840000000000001e-05,
      "loss": 5.5312,
      "step": 172
    },
    {
      "epoch": 0.10873664362036455,
      "grad_norm": 6.950132369995117,
      "learning_rate": 6.879999999999999e-05,
      "loss": 5.216,
      "step": 173
    },
    {
      "epoch": 0.10936517913262099,
      "grad_norm": 7.643744945526123,
      "learning_rate": 6.92e-05,
      "loss": 5.374,
      "step": 174
    },
    {
      "epoch": 0.10999371464487744,
      "grad_norm": 12.621402740478516,
      "learning_rate": 6.96e-05,
      "loss": 6.3553,
      "step": 175
    },
    {
      "epoch": 0.11062225015713388,
      "grad_norm": 6.861866474151611,
      "learning_rate": 7e-05,
      "loss": 5.0186,
      "step": 176
    },
    {
      "epoch": 0.11125078566939033,
      "grad_norm": 7.023342609405518,
      "learning_rate": 7.04e-05,
      "loss": 4.7693,
      "step": 177
    },
    {
      "epoch": 0.11187932118164676,
      "grad_norm": 7.627071380615234,
      "learning_rate": 7.08e-05,
      "loss": 5.5354,
      "step": 178
    },
    {
      "epoch": 0.1125078566939032,
      "grad_norm": 7.475245952606201,
      "learning_rate": 7.12e-05,
      "loss": 5.5916,
      "step": 179
    },
    {
      "epoch": 0.11313639220615965,
      "grad_norm": 6.348559379577637,
      "learning_rate": 7.16e-05,
      "loss": 4.7914,
      "step": 180
    },
    {
      "epoch": 0.11376492771841609,
      "grad_norm": 6.895409107208252,
      "learning_rate": 7.2e-05,
      "loss": 5.4136,
      "step": 181
    },
    {
      "epoch": 0.11439346323067254,
      "grad_norm": 5.746918201446533,
      "learning_rate": 7.24e-05,
      "loss": 4.8071,
      "step": 182
    },
    {
      "epoch": 0.11502199874292897,
      "grad_norm": 7.965058326721191,
      "learning_rate": 7.280000000000001e-05,
      "loss": 4.6486,
      "step": 183
    },
    {
      "epoch": 0.11565053425518541,
      "grad_norm": 8.059706687927246,
      "learning_rate": 7.32e-05,
      "loss": 5.3917,
      "step": 184
    },
    {
      "epoch": 0.11627906976744186,
      "grad_norm": 7.2255353927612305,
      "learning_rate": 7.36e-05,
      "loss": 5.0876,
      "step": 185
    },
    {
      "epoch": 0.1169076052796983,
      "grad_norm": 6.53950834274292,
      "learning_rate": 7.4e-05,
      "loss": 4.7656,
      "step": 186
    },
    {
      "epoch": 0.11753614079195475,
      "grad_norm": 6.379930019378662,
      "learning_rate": 7.44e-05,
      "loss": 4.6405,
      "step": 187
    },
    {
      "epoch": 0.1181646763042112,
      "grad_norm": 7.332822322845459,
      "learning_rate": 7.48e-05,
      "loss": 4.6839,
      "step": 188
    },
    {
      "epoch": 0.11879321181646763,
      "grad_norm": 7.975095272064209,
      "learning_rate": 7.52e-05,
      "loss": 4.8748,
      "step": 189
    },
    {
      "epoch": 0.11942174732872407,
      "grad_norm": 7.8162360191345215,
      "learning_rate": 7.560000000000001e-05,
      "loss": 4.9994,
      "step": 190
    },
    {
      "epoch": 0.12005028284098052,
      "grad_norm": 7.749350070953369,
      "learning_rate": 7.6e-05,
      "loss": 4.7996,
      "step": 191
    },
    {
      "epoch": 0.12067881835323696,
      "grad_norm": 12.814620018005371,
      "learning_rate": 7.64e-05,
      "loss": 5.4858,
      "step": 192
    },
    {
      "epoch": 0.1213073538654934,
      "grad_norm": 7.985275745391846,
      "learning_rate": 7.680000000000001e-05,
      "loss": 4.9178,
      "step": 193
    },
    {
      "epoch": 0.12193588937774984,
      "grad_norm": 7.037015438079834,
      "learning_rate": 7.72e-05,
      "loss": 4.4436,
      "step": 194
    },
    {
      "epoch": 0.12256442489000628,
      "grad_norm": 12.540748596191406,
      "learning_rate": 7.76e-05,
      "loss": 5.3947,
      "step": 195
    },
    {
      "epoch": 0.12319296040226273,
      "grad_norm": 8.698318481445312,
      "learning_rate": 7.800000000000001e-05,
      "loss": 4.6521,
      "step": 196
    },
    {
      "epoch": 0.12382149591451917,
      "grad_norm": 6.837226867675781,
      "learning_rate": 7.840000000000001e-05,
      "loss": 4.4502,
      "step": 197
    },
    {
      "epoch": 0.12445003142677562,
      "grad_norm": 6.902530670166016,
      "learning_rate": 7.88e-05,
      "loss": 5.2111,
      "step": 198
    },
    {
      "epoch": 0.12507856693903205,
      "grad_norm": 7.714503288269043,
      "learning_rate": 7.920000000000001e-05,
      "loss": 4.5983,
      "step": 199
    },
    {
      "epoch": 0.1257071024512885,
      "grad_norm": 9.488588333129883,
      "learning_rate": 7.960000000000001e-05,
      "loss": 5.496,
      "step": 200
    },
    {
      "epoch": 0.12633563796354494,
      "grad_norm": 9.41787052154541,
      "learning_rate": 8e-05,
      "loss": 4.8966,
      "step": 201
    },
    {
      "epoch": 0.12696417347580138,
      "grad_norm": 7.63663911819458,
      "learning_rate": 8.04e-05,
      "loss": 4.6692,
      "step": 202
    },
    {
      "epoch": 0.12759270898805783,
      "grad_norm": 8.535087585449219,
      "learning_rate": 8.080000000000001e-05,
      "loss": 5.0543,
      "step": 203
    },
    {
      "epoch": 0.12822124450031427,
      "grad_norm": 8.822395324707031,
      "learning_rate": 8.120000000000001e-05,
      "loss": 4.7249,
      "step": 204
    },
    {
      "epoch": 0.12884978001257072,
      "grad_norm": 7.7394561767578125,
      "learning_rate": 8.16e-05,
      "loss": 4.5474,
      "step": 205
    },
    {
      "epoch": 0.12947831552482716,
      "grad_norm": 9.459196090698242,
      "learning_rate": 8.2e-05,
      "loss": 5.3748,
      "step": 206
    },
    {
      "epoch": 0.13010685103708358,
      "grad_norm": 6.900613784790039,
      "learning_rate": 8.24e-05,
      "loss": 4.5716,
      "step": 207
    },
    {
      "epoch": 0.13073538654934003,
      "grad_norm": 7.961105823516846,
      "learning_rate": 8.28e-05,
      "loss": 4.5067,
      "step": 208
    },
    {
      "epoch": 0.13136392206159647,
      "grad_norm": 10.71306037902832,
      "learning_rate": 8.32e-05,
      "loss": 4.8309,
      "step": 209
    },
    {
      "epoch": 0.13199245757385292,
      "grad_norm": 8.900117874145508,
      "learning_rate": 8.36e-05,
      "loss": 5.0913,
      "step": 210
    },
    {
      "epoch": 0.13262099308610936,
      "grad_norm": 9.863144874572754,
      "learning_rate": 8.4e-05,
      "loss": 4.9469,
      "step": 211
    },
    {
      "epoch": 0.1332495285983658,
      "grad_norm": 7.620909690856934,
      "learning_rate": 8.44e-05,
      "loss": 4.484,
      "step": 212
    },
    {
      "epoch": 0.13387806411062225,
      "grad_norm": 10.667424201965332,
      "learning_rate": 8.48e-05,
      "loss": 5.1474,
      "step": 213
    },
    {
      "epoch": 0.1345065996228787,
      "grad_norm": 11.249619483947754,
      "learning_rate": 8.52e-05,
      "loss": 4.6222,
      "step": 214
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 8.625935554504395,
      "learning_rate": 8.560000000000001e-05,
      "loss": 4.4776,
      "step": 215
    },
    {
      "epoch": 0.1357636706473916,
      "grad_norm": 8.138617515563965,
      "learning_rate": 8.6e-05,
      "loss": 4.7134,
      "step": 216
    },
    {
      "epoch": 0.13639220615964803,
      "grad_norm": 7.485641956329346,
      "learning_rate": 8.64e-05,
      "loss": 4.8201,
      "step": 217
    },
    {
      "epoch": 0.13702074167190445,
      "grad_norm": 11.87485408782959,
      "learning_rate": 8.680000000000001e-05,
      "loss": 4.7288,
      "step": 218
    },
    {
      "epoch": 0.1376492771841609,
      "grad_norm": 8.551025390625,
      "learning_rate": 8.72e-05,
      "loss": 4.3091,
      "step": 219
    },
    {
      "epoch": 0.13827781269641734,
      "grad_norm": 10.597816467285156,
      "learning_rate": 8.76e-05,
      "loss": 4.9491,
      "step": 220
    },
    {
      "epoch": 0.13890634820867379,
      "grad_norm": 84.34934997558594,
      "learning_rate": 8.800000000000001e-05,
      "loss": 7.8641,
      "step": 221
    },
    {
      "epoch": 0.13953488372093023,
      "grad_norm": 14.08538818359375,
      "learning_rate": 8.840000000000001e-05,
      "loss": 5.1718,
      "step": 222
    },
    {
      "epoch": 0.14016341923318668,
      "grad_norm": 8.5634183883667,
      "learning_rate": 8.88e-05,
      "loss": 4.5042,
      "step": 223
    },
    {
      "epoch": 0.14079195474544312,
      "grad_norm": 9.070499420166016,
      "learning_rate": 8.92e-05,
      "loss": 4.684,
      "step": 224
    },
    {
      "epoch": 0.14142049025769957,
      "grad_norm": 13.369037628173828,
      "learning_rate": 8.960000000000001e-05,
      "loss": 4.2649,
      "step": 225
    },
    {
      "epoch": 0.142049025769956,
      "grad_norm": 8.797863960266113,
      "learning_rate": 9e-05,
      "loss": 4.0673,
      "step": 226
    },
    {
      "epoch": 0.14267756128221246,
      "grad_norm": 11.846273422241211,
      "learning_rate": 9.04e-05,
      "loss": 4.3157,
      "step": 227
    },
    {
      "epoch": 0.1433060967944689,
      "grad_norm": 9.263139724731445,
      "learning_rate": 9.080000000000001e-05,
      "loss": 4.2926,
      "step": 228
    },
    {
      "epoch": 0.14393463230672532,
      "grad_norm": 6.980598449707031,
      "learning_rate": 9.120000000000001e-05,
      "loss": 4.5284,
      "step": 229
    },
    {
      "epoch": 0.14456316781898176,
      "grad_norm": 7.539294719696045,
      "learning_rate": 9.16e-05,
      "loss": 4.0574,
      "step": 230
    },
    {
      "epoch": 0.1451917033312382,
      "grad_norm": 10.551584243774414,
      "learning_rate": 9.200000000000001e-05,
      "loss": 4.8214,
      "step": 231
    },
    {
      "epoch": 0.14582023884349465,
      "grad_norm": 8.377208709716797,
      "learning_rate": 9.240000000000001e-05,
      "loss": 4.1492,
      "step": 232
    },
    {
      "epoch": 0.1464487743557511,
      "grad_norm": 8.809621810913086,
      "learning_rate": 9.28e-05,
      "loss": 3.9481,
      "step": 233
    },
    {
      "epoch": 0.14707730986800754,
      "grad_norm": 15.098812103271484,
      "learning_rate": 9.320000000000002e-05,
      "loss": 4.5192,
      "step": 234
    },
    {
      "epoch": 0.147705845380264,
      "grad_norm": 16.269895553588867,
      "learning_rate": 9.360000000000001e-05,
      "loss": 4.5962,
      "step": 235
    },
    {
      "epoch": 0.14833438089252043,
      "grad_norm": 9.021276473999023,
      "learning_rate": 9.4e-05,
      "loss": 4.6729,
      "step": 236
    },
    {
      "epoch": 0.14896291640477688,
      "grad_norm": 10.110734939575195,
      "learning_rate": 9.44e-05,
      "loss": 4.5957,
      "step": 237
    },
    {
      "epoch": 0.14959145191703332,
      "grad_norm": 10.254793167114258,
      "learning_rate": 9.48e-05,
      "loss": 4.1534,
      "step": 238
    },
    {
      "epoch": 0.15021998742928974,
      "grad_norm": 10.400528907775879,
      "learning_rate": 9.52e-05,
      "loss": 4.5035,
      "step": 239
    },
    {
      "epoch": 0.1508485229415462,
      "grad_norm": 13.529309272766113,
      "learning_rate": 9.56e-05,
      "loss": 4.7124,
      "step": 240
    },
    {
      "epoch": 0.15147705845380263,
      "grad_norm": 9.187389373779297,
      "learning_rate": 9.6e-05,
      "loss": 4.2564,
      "step": 241
    },
    {
      "epoch": 0.15210559396605908,
      "grad_norm": 9.760189056396484,
      "learning_rate": 9.64e-05,
      "loss": 4.5863,
      "step": 242
    },
    {
      "epoch": 0.15273412947831552,
      "grad_norm": 11.033064842224121,
      "learning_rate": 9.680000000000001e-05,
      "loss": 4.4069,
      "step": 243
    },
    {
      "epoch": 0.15336266499057197,
      "grad_norm": 8.79299259185791,
      "learning_rate": 9.72e-05,
      "loss": 4.2232,
      "step": 244
    },
    {
      "epoch": 0.1539912005028284,
      "grad_norm": 10.526970863342285,
      "learning_rate": 9.76e-05,
      "loss": 4.6675,
      "step": 245
    },
    {
      "epoch": 0.15461973601508486,
      "grad_norm": 14.993115425109863,
      "learning_rate": 9.8e-05,
      "loss": 4.3974,
      "step": 246
    },
    {
      "epoch": 0.1552482715273413,
      "grad_norm": 8.852916717529297,
      "learning_rate": 9.84e-05,
      "loss": 4.1023,
      "step": 247
    },
    {
      "epoch": 0.15587680703959775,
      "grad_norm": 17.305910110473633,
      "learning_rate": 9.88e-05,
      "loss": 4.2657,
      "step": 248
    },
    {
      "epoch": 0.1565053425518542,
      "grad_norm": 12.877272605895996,
      "learning_rate": 9.92e-05,
      "loss": 4.318,
      "step": 249
    },
    {
      "epoch": 0.1571338780641106,
      "grad_norm": 12.16046142578125,
      "learning_rate": 9.960000000000001e-05,
      "loss": 4.0387,
      "step": 250
    },
    {
      "epoch": 0.15776241357636706,
      "grad_norm": 13.384797096252441,
      "learning_rate": 0.0001,
      "loss": 4.3935,
      "step": 251
    },
    {
      "epoch": 0.1583909490886235,
      "grad_norm": 12.947620391845703,
      "learning_rate": 0.0001004,
      "loss": 4.2752,
      "step": 252
    },
    {
      "epoch": 0.15901948460087995,
      "grad_norm": 13.004393577575684,
      "learning_rate": 0.00010080000000000001,
      "loss": 4.4285,
      "step": 253
    },
    {
      "epoch": 0.1596480201131364,
      "grad_norm": 10.64500617980957,
      "learning_rate": 0.00010120000000000001,
      "loss": 4.3775,
      "step": 254
    },
    {
      "epoch": 0.16027655562539284,
      "grad_norm": 10.208504676818848,
      "learning_rate": 0.0001016,
      "loss": 4.3942,
      "step": 255
    },
    {
      "epoch": 0.16090509113764928,
      "grad_norm": 10.392770767211914,
      "learning_rate": 0.00010200000000000001,
      "loss": 3.964,
      "step": 256
    },
    {
      "epoch": 0.16153362664990573,
      "grad_norm": 11.704667091369629,
      "learning_rate": 0.00010240000000000001,
      "loss": 3.2764,
      "step": 257
    },
    {
      "epoch": 0.16216216216216217,
      "grad_norm": 11.466705322265625,
      "learning_rate": 0.0001028,
      "loss": 4.3286,
      "step": 258
    },
    {
      "epoch": 0.16279069767441862,
      "grad_norm": 11.332155227661133,
      "learning_rate": 0.0001032,
      "loss": 4.4598,
      "step": 259
    },
    {
      "epoch": 0.16341923318667503,
      "grad_norm": 10.597373962402344,
      "learning_rate": 0.00010360000000000001,
      "loss": 4.3576,
      "step": 260
    },
    {
      "epoch": 0.16404776869893148,
      "grad_norm": 13.826549530029297,
      "learning_rate": 0.00010400000000000001,
      "loss": 4.1209,
      "step": 261
    },
    {
      "epoch": 0.16467630421118792,
      "grad_norm": 12.812018394470215,
      "learning_rate": 0.0001044,
      "loss": 4.3951,
      "step": 262
    },
    {
      "epoch": 0.16530483972344437,
      "grad_norm": 11.573945045471191,
      "learning_rate": 0.00010480000000000001,
      "loss": 4.6093,
      "step": 263
    },
    {
      "epoch": 0.16593337523570081,
      "grad_norm": 9.195262908935547,
      "learning_rate": 0.00010520000000000001,
      "loss": 4.0342,
      "step": 264
    },
    {
      "epoch": 0.16656191074795726,
      "grad_norm": 10.87674331665039,
      "learning_rate": 0.0001056,
      "loss": 3.9368,
      "step": 265
    },
    {
      "epoch": 0.1671904462602137,
      "grad_norm": 11.914413452148438,
      "learning_rate": 0.00010600000000000002,
      "loss": 4.0043,
      "step": 266
    },
    {
      "epoch": 0.16781898177247015,
      "grad_norm": 11.688831329345703,
      "learning_rate": 0.00010640000000000001,
      "loss": 3.6894,
      "step": 267
    },
    {
      "epoch": 0.1684475172847266,
      "grad_norm": 17.798152923583984,
      "learning_rate": 0.00010680000000000001,
      "loss": 4.4073,
      "step": 268
    },
    {
      "epoch": 0.16907605279698304,
      "grad_norm": 11.202817916870117,
      "learning_rate": 0.00010720000000000002,
      "loss": 3.9713,
      "step": 269
    },
    {
      "epoch": 0.16970458830923948,
      "grad_norm": 11.484283447265625,
      "learning_rate": 0.00010760000000000001,
      "loss": 3.7401,
      "step": 270
    },
    {
      "epoch": 0.1703331238214959,
      "grad_norm": 8.625189781188965,
      "learning_rate": 0.00010800000000000001,
      "loss": 3.6847,
      "step": 271
    },
    {
      "epoch": 0.17096165933375235,
      "grad_norm": 14.114002227783203,
      "learning_rate": 0.00010840000000000002,
      "loss": 4.1115,
      "step": 272
    },
    {
      "epoch": 0.1715901948460088,
      "grad_norm": 12.885270118713379,
      "learning_rate": 0.00010880000000000002,
      "loss": 3.5002,
      "step": 273
    },
    {
      "epoch": 0.17221873035826524,
      "grad_norm": 13.46296501159668,
      "learning_rate": 0.00010920000000000001,
      "loss": 4.0249,
      "step": 274
    },
    {
      "epoch": 0.17284726587052168,
      "grad_norm": 17.22674560546875,
      "learning_rate": 0.00010960000000000001,
      "loss": 4.4484,
      "step": 275
    },
    {
      "epoch": 0.17347580138277813,
      "grad_norm": 13.292183876037598,
      "learning_rate": 0.00011000000000000002,
      "loss": 3.9718,
      "step": 276
    },
    {
      "epoch": 0.17410433689503457,
      "grad_norm": 35.4398307800293,
      "learning_rate": 0.00011040000000000001,
      "loss": 4.0052,
      "step": 277
    },
    {
      "epoch": 0.17473287240729102,
      "grad_norm": 19.460407257080078,
      "learning_rate": 0.00011080000000000001,
      "loss": 4.1892,
      "step": 278
    },
    {
      "epoch": 0.17536140791954746,
      "grad_norm": 15.537309646606445,
      "learning_rate": 0.00011120000000000002,
      "loss": 3.9878,
      "step": 279
    },
    {
      "epoch": 0.1759899434318039,
      "grad_norm": 16.156564712524414,
      "learning_rate": 0.00011160000000000002,
      "loss": 3.8753,
      "step": 280
    },
    {
      "epoch": 0.17661847894406035,
      "grad_norm": 17.241336822509766,
      "learning_rate": 0.00011200000000000001,
      "loss": 3.9479,
      "step": 281
    },
    {
      "epoch": 0.17724701445631677,
      "grad_norm": 20.027856826782227,
      "learning_rate": 0.00011240000000000002,
      "loss": 3.9856,
      "step": 282
    },
    {
      "epoch": 0.17787554996857322,
      "grad_norm": 11.107442855834961,
      "learning_rate": 0.00011279999999999999,
      "loss": 3.5669,
      "step": 283
    },
    {
      "epoch": 0.17850408548082966,
      "grad_norm": 14.415189743041992,
      "learning_rate": 0.0001132,
      "loss": 4.1763,
      "step": 284
    },
    {
      "epoch": 0.1791326209930861,
      "grad_norm": 16.054901123046875,
      "learning_rate": 0.0001136,
      "loss": 4.1089,
      "step": 285
    },
    {
      "epoch": 0.17976115650534255,
      "grad_norm": 17.718029022216797,
      "learning_rate": 0.00011399999999999999,
      "loss": 3.7917,
      "step": 286
    },
    {
      "epoch": 0.180389692017599,
      "grad_norm": 15.520782470703125,
      "learning_rate": 0.0001144,
      "loss": 3.9876,
      "step": 287
    },
    {
      "epoch": 0.18101822752985544,
      "grad_norm": 19.573312759399414,
      "learning_rate": 0.0001148,
      "loss": 3.9456,
      "step": 288
    },
    {
      "epoch": 0.1816467630421119,
      "grad_norm": 11.45984935760498,
      "learning_rate": 0.0001152,
      "loss": 3.5074,
      "step": 289
    },
    {
      "epoch": 0.18227529855436833,
      "grad_norm": 21.8663387298584,
      "learning_rate": 0.00011559999999999999,
      "loss": 4.0081,
      "step": 290
    },
    {
      "epoch": 0.18290383406662478,
      "grad_norm": 16.874753952026367,
      "learning_rate": 0.000116,
      "loss": 3.9281,
      "step": 291
    },
    {
      "epoch": 0.1835323695788812,
      "grad_norm": 16.98707389831543,
      "learning_rate": 0.0001164,
      "loss": 3.9644,
      "step": 292
    },
    {
      "epoch": 0.18416090509113764,
      "grad_norm": 12.668580055236816,
      "learning_rate": 0.00011679999999999999,
      "loss": 3.3014,
      "step": 293
    },
    {
      "epoch": 0.18478944060339408,
      "grad_norm": 11.530134201049805,
      "learning_rate": 0.0001172,
      "loss": 3.3005,
      "step": 294
    },
    {
      "epoch": 0.18541797611565053,
      "grad_norm": 53.85222244262695,
      "learning_rate": 0.0001176,
      "loss": 4.4674,
      "step": 295
    },
    {
      "epoch": 0.18604651162790697,
      "grad_norm": 12.703718185424805,
      "learning_rate": 0.000118,
      "loss": 3.629,
      "step": 296
    },
    {
      "epoch": 0.18667504714016342,
      "grad_norm": 18.4884090423584,
      "learning_rate": 0.0001184,
      "loss": 3.7944,
      "step": 297
    },
    {
      "epoch": 0.18730358265241986,
      "grad_norm": 18.730792999267578,
      "learning_rate": 0.0001188,
      "loss": 3.6521,
      "step": 298
    },
    {
      "epoch": 0.1879321181646763,
      "grad_norm": 10.74968433380127,
      "learning_rate": 0.0001192,
      "loss": 3.6895,
      "step": 299
    },
    {
      "epoch": 0.18856065367693275,
      "grad_norm": 13.541333198547363,
      "learning_rate": 0.00011960000000000001,
      "loss": 3.4517,
      "step": 300
    },
    {
      "epoch": 0.1891891891891892,
      "grad_norm": 20.38814926147461,
      "learning_rate": 0.00012,
      "loss": 3.3987,
      "step": 301
    },
    {
      "epoch": 0.18981772470144564,
      "grad_norm": 21.178226470947266,
      "learning_rate": 0.0001204,
      "loss": 3.6073,
      "step": 302
    },
    {
      "epoch": 0.19044626021370206,
      "grad_norm": 11.370046615600586,
      "learning_rate": 0.0001208,
      "loss": 3.3391,
      "step": 303
    },
    {
      "epoch": 0.1910747957259585,
      "grad_norm": 10.467302322387695,
      "learning_rate": 0.0001212,
      "loss": 3.7676,
      "step": 304
    },
    {
      "epoch": 0.19170333123821495,
      "grad_norm": 11.307904243469238,
      "learning_rate": 0.0001216,
      "loss": 3.4048,
      "step": 305
    },
    {
      "epoch": 0.1923318667504714,
      "grad_norm": 20.741159439086914,
      "learning_rate": 0.000122,
      "loss": 3.1971,
      "step": 306
    },
    {
      "epoch": 0.19296040226272784,
      "grad_norm": 22.384532928466797,
      "learning_rate": 0.0001224,
      "loss": 3.2315,
      "step": 307
    },
    {
      "epoch": 0.1935889377749843,
      "grad_norm": 19.073701858520508,
      "learning_rate": 0.0001228,
      "loss": 3.3426,
      "step": 308
    },
    {
      "epoch": 0.19421747328724073,
      "grad_norm": 20.864418029785156,
      "learning_rate": 0.0001232,
      "loss": 3.4605,
      "step": 309
    },
    {
      "epoch": 0.19484600879949718,
      "grad_norm": 15.888052940368652,
      "learning_rate": 0.0001236,
      "loss": 3.5554,
      "step": 310
    },
    {
      "epoch": 0.19547454431175362,
      "grad_norm": 13.650964736938477,
      "learning_rate": 0.000124,
      "loss": 3.3372,
      "step": 311
    },
    {
      "epoch": 0.19610307982401007,
      "grad_norm": 20.19765853881836,
      "learning_rate": 0.00012440000000000002,
      "loss": 3.4044,
      "step": 312
    },
    {
      "epoch": 0.19673161533626649,
      "grad_norm": 18.977705001831055,
      "learning_rate": 0.0001248,
      "loss": 3.5597,
      "step": 313
    },
    {
      "epoch": 0.19736015084852293,
      "grad_norm": 13.097780227661133,
      "learning_rate": 0.0001252,
      "loss": 3.2755,
      "step": 314
    },
    {
      "epoch": 0.19798868636077938,
      "grad_norm": 17.84178924560547,
      "learning_rate": 0.00012560000000000002,
      "loss": 3.2663,
      "step": 315
    },
    {
      "epoch": 0.19861722187303582,
      "grad_norm": 14.159675598144531,
      "learning_rate": 0.000126,
      "loss": 3.4946,
      "step": 316
    },
    {
      "epoch": 0.19924575738529227,
      "grad_norm": 11.891308784484863,
      "learning_rate": 0.0001264,
      "loss": 2.8523,
      "step": 317
    },
    {
      "epoch": 0.1998742928975487,
      "grad_norm": 15.97802734375,
      "learning_rate": 0.00012680000000000002,
      "loss": 3.2556,
      "step": 318
    },
    {
      "epoch": 0.20050282840980516,
      "grad_norm": 13.670428276062012,
      "learning_rate": 0.0001272,
      "loss": 3.4079,
      "step": 319
    },
    {
      "epoch": 0.2011313639220616,
      "grad_norm": 15.742596626281738,
      "learning_rate": 0.0001276,
      "loss": 3.358,
      "step": 320
    },
    {
      "epoch": 0.2011313639220616,
      "eval_loss": 2.595961093902588,
      "eval_runtime": 1561.7692,
      "eval_samples_per_second": 1.651,
      "eval_steps_per_second": 1.651,
      "step": 320
    },
    {
      "epoch": 0.20175989943431805,
      "grad_norm": 18.516000747680664,
      "learning_rate": 0.00012800000000000002,
      "loss": 3.4599,
      "step": 321
    },
    {
      "epoch": 0.2023884349465745,
      "grad_norm": 12.767962455749512,
      "learning_rate": 0.0001284,
      "loss": 3.2328,
      "step": 322
    },
    {
      "epoch": 0.20301697045883094,
      "grad_norm": 18.14620018005371,
      "learning_rate": 0.00012880000000000001,
      "loss": 3.5025,
      "step": 323
    },
    {
      "epoch": 0.20364550597108735,
      "grad_norm": 16.109487533569336,
      "learning_rate": 0.00012920000000000002,
      "loss": 3.5465,
      "step": 324
    },
    {
      "epoch": 0.2042740414833438,
      "grad_norm": 26.81826400756836,
      "learning_rate": 0.0001296,
      "loss": 3.7021,
      "step": 325
    },
    {
      "epoch": 0.20490257699560024,
      "grad_norm": 18.376623153686523,
      "learning_rate": 0.00013000000000000002,
      "loss": 3.3511,
      "step": 326
    },
    {
      "epoch": 0.2055311125078567,
      "grad_norm": 16.065563201904297,
      "learning_rate": 0.0001304,
      "loss": 3.0496,
      "step": 327
    },
    {
      "epoch": 0.20615964802011313,
      "grad_norm": 19.734437942504883,
      "learning_rate": 0.0001308,
      "loss": 3.3436,
      "step": 328
    },
    {
      "epoch": 0.20678818353236958,
      "grad_norm": 15.109872817993164,
      "learning_rate": 0.00013120000000000002,
      "loss": 3.1295,
      "step": 329
    },
    {
      "epoch": 0.20741671904462602,
      "grad_norm": 13.069215774536133,
      "learning_rate": 0.0001316,
      "loss": 3.018,
      "step": 330
    },
    {
      "epoch": 0.20804525455688247,
      "grad_norm": 12.85176944732666,
      "learning_rate": 0.000132,
      "loss": 3.0171,
      "step": 331
    },
    {
      "epoch": 0.20867379006913891,
      "grad_norm": 20.065465927124023,
      "learning_rate": 0.00013240000000000002,
      "loss": 3.1725,
      "step": 332
    },
    {
      "epoch": 0.20930232558139536,
      "grad_norm": 17.447635650634766,
      "learning_rate": 0.0001328,
      "loss": 2.9863,
      "step": 333
    },
    {
      "epoch": 0.2099308610936518,
      "grad_norm": 20.958803176879883,
      "learning_rate": 0.0001332,
      "loss": 3.236,
      "step": 334
    },
    {
      "epoch": 0.21055939660590822,
      "grad_norm": 24.92554473876953,
      "learning_rate": 0.00013360000000000002,
      "loss": 2.9125,
      "step": 335
    },
    {
      "epoch": 0.21118793211816467,
      "grad_norm": 12.4500150680542,
      "learning_rate": 0.000134,
      "loss": 2.4232,
      "step": 336
    },
    {
      "epoch": 0.2118164676304211,
      "grad_norm": 19.983152389526367,
      "learning_rate": 0.00013440000000000001,
      "loss": 3.2391,
      "step": 337
    },
    {
      "epoch": 0.21244500314267756,
      "grad_norm": 14.444880485534668,
      "learning_rate": 0.00013480000000000002,
      "loss": 2.8852,
      "step": 338
    },
    {
      "epoch": 0.213073538654934,
      "grad_norm": 17.165590286254883,
      "learning_rate": 0.0001352,
      "loss": 3.2699,
      "step": 339
    },
    {
      "epoch": 0.21370207416719045,
      "grad_norm": 16.150278091430664,
      "learning_rate": 0.00013560000000000002,
      "loss": 3.066,
      "step": 340
    },
    {
      "epoch": 0.2143306096794469,
      "grad_norm": 27.125503540039062,
      "learning_rate": 0.00013600000000000003,
      "loss": 3.1006,
      "step": 341
    },
    {
      "epoch": 0.21495914519170334,
      "grad_norm": 18.716642379760742,
      "learning_rate": 0.0001364,
      "loss": 2.9524,
      "step": 342
    },
    {
      "epoch": 0.21558768070395978,
      "grad_norm": 14.65403938293457,
      "learning_rate": 0.00013680000000000002,
      "loss": 2.9555,
      "step": 343
    },
    {
      "epoch": 0.21621621621621623,
      "grad_norm": 15.025317192077637,
      "learning_rate": 0.00013720000000000003,
      "loss": 2.7592,
      "step": 344
    },
    {
      "epoch": 0.21684475172847265,
      "grad_norm": 23.24863052368164,
      "learning_rate": 0.00013759999999999998,
      "loss": 3.2544,
      "step": 345
    },
    {
      "epoch": 0.2174732872407291,
      "grad_norm": 14.683283805847168,
      "learning_rate": 0.000138,
      "loss": 3.1798,
      "step": 346
    },
    {
      "epoch": 0.21810182275298554,
      "grad_norm": 19.991384506225586,
      "learning_rate": 0.0001384,
      "loss": 3.0177,
      "step": 347
    },
    {
      "epoch": 0.21873035826524198,
      "grad_norm": 20.53168296813965,
      "learning_rate": 0.00013879999999999999,
      "loss": 3.182,
      "step": 348
    },
    {
      "epoch": 0.21935889377749843,
      "grad_norm": 23.478450775146484,
      "learning_rate": 0.0001392,
      "loss": 3.268,
      "step": 349
    },
    {
      "epoch": 0.21998742928975487,
      "grad_norm": 15.088510513305664,
      "learning_rate": 0.0001396,
      "loss": 2.7058,
      "step": 350
    },
    {
      "epoch": 0.22061596480201132,
      "grad_norm": 16.69074821472168,
      "learning_rate": 0.00014,
      "loss": 2.9989,
      "step": 351
    },
    {
      "epoch": 0.22124450031426776,
      "grad_norm": 24.132953643798828,
      "learning_rate": 0.0001404,
      "loss": 2.8016,
      "step": 352
    },
    {
      "epoch": 0.2218730358265242,
      "grad_norm": 13.829506874084473,
      "learning_rate": 0.0001408,
      "loss": 2.9762,
      "step": 353
    },
    {
      "epoch": 0.22250157133878065,
      "grad_norm": 11.858349800109863,
      "learning_rate": 0.0001412,
      "loss": 3.0663,
      "step": 354
    },
    {
      "epoch": 0.2231301068510371,
      "grad_norm": 17.9423885345459,
      "learning_rate": 0.0001416,
      "loss": 3.0817,
      "step": 355
    },
    {
      "epoch": 0.22375864236329351,
      "grad_norm": 17.859312057495117,
      "learning_rate": 0.000142,
      "loss": 3.2717,
      "step": 356
    },
    {
      "epoch": 0.22438717787554996,
      "grad_norm": 24.465381622314453,
      "learning_rate": 0.0001424,
      "loss": 3.0278,
      "step": 357
    },
    {
      "epoch": 0.2250157133878064,
      "grad_norm": 31.655973434448242,
      "learning_rate": 0.0001428,
      "loss": 2.5686,
      "step": 358
    },
    {
      "epoch": 0.22564424890006285,
      "grad_norm": 23.032289505004883,
      "learning_rate": 0.0001432,
      "loss": 2.8256,
      "step": 359
    },
    {
      "epoch": 0.2262727844123193,
      "grad_norm": 9.913031578063965,
      "learning_rate": 0.0001436,
      "loss": 2.6509,
      "step": 360
    },
    {
      "epoch": 0.22690131992457574,
      "grad_norm": 14.368906021118164,
      "learning_rate": 0.000144,
      "loss": 2.7751,
      "step": 361
    },
    {
      "epoch": 0.22752985543683218,
      "grad_norm": 14.559577941894531,
      "learning_rate": 0.0001444,
      "loss": 2.7144,
      "step": 362
    },
    {
      "epoch": 0.22815839094908863,
      "grad_norm": 11.854909896850586,
      "learning_rate": 0.0001448,
      "loss": 2.6202,
      "step": 363
    },
    {
      "epoch": 0.22878692646134507,
      "grad_norm": 15.724352836608887,
      "learning_rate": 0.0001452,
      "loss": 2.9942,
      "step": 364
    },
    {
      "epoch": 0.22941546197360152,
      "grad_norm": 18.073122024536133,
      "learning_rate": 0.00014560000000000002,
      "loss": 2.7307,
      "step": 365
    },
    {
      "epoch": 0.23004399748585794,
      "grad_norm": 13.995152473449707,
      "learning_rate": 0.000146,
      "loss": 2.5305,
      "step": 366
    },
    {
      "epoch": 0.23067253299811438,
      "grad_norm": 24.081336975097656,
      "learning_rate": 0.0001464,
      "loss": 2.9235,
      "step": 367
    },
    {
      "epoch": 0.23130106851037083,
      "grad_norm": 15.185482025146484,
      "learning_rate": 0.00014680000000000002,
      "loss": 2.8097,
      "step": 368
    },
    {
      "epoch": 0.23192960402262727,
      "grad_norm": 17.898988723754883,
      "learning_rate": 0.0001472,
      "loss": 2.7808,
      "step": 369
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 11.116854667663574,
      "learning_rate": 0.0001476,
      "loss": 2.9811,
      "step": 370
    },
    {
      "epoch": 0.23318667504714016,
      "grad_norm": 16.254613876342773,
      "learning_rate": 0.000148,
      "loss": 2.6689,
      "step": 371
    },
    {
      "epoch": 0.2338152105593966,
      "grad_norm": 15.59523868560791,
      "learning_rate": 0.0001484,
      "loss": 2.6041,
      "step": 372
    },
    {
      "epoch": 0.23444374607165305,
      "grad_norm": 12.621963500976562,
      "learning_rate": 0.0001488,
      "loss": 2.889,
      "step": 373
    },
    {
      "epoch": 0.2350722815839095,
      "grad_norm": 21.332027435302734,
      "learning_rate": 0.0001492,
      "loss": 2.6854,
      "step": 374
    },
    {
      "epoch": 0.23570081709616594,
      "grad_norm": 25.096506118774414,
      "learning_rate": 0.0001496,
      "loss": 3.0866,
      "step": 375
    },
    {
      "epoch": 0.2363293526084224,
      "grad_norm": 21.07834815979004,
      "learning_rate": 0.00015000000000000001,
      "loss": 2.8421,
      "step": 376
    },
    {
      "epoch": 0.2369578881206788,
      "grad_norm": 56.05162048339844,
      "learning_rate": 0.0001504,
      "loss": 2.8167,
      "step": 377
    },
    {
      "epoch": 0.23758642363293525,
      "grad_norm": 33.98356246948242,
      "learning_rate": 0.0001508,
      "loss": 2.9985,
      "step": 378
    },
    {
      "epoch": 0.2382149591451917,
      "grad_norm": 31.202903747558594,
      "learning_rate": 0.00015120000000000002,
      "loss": 2.3907,
      "step": 379
    },
    {
      "epoch": 0.23884349465744814,
      "grad_norm": 12.752126693725586,
      "learning_rate": 0.0001516,
      "loss": 2.4548,
      "step": 380
    },
    {
      "epoch": 0.23947203016970459,
      "grad_norm": 18.89848518371582,
      "learning_rate": 0.000152,
      "loss": 2.5799,
      "step": 381
    },
    {
      "epoch": 0.24010056568196103,
      "grad_norm": 12.335553169250488,
      "learning_rate": 0.00015240000000000002,
      "loss": 2.4312,
      "step": 382
    },
    {
      "epoch": 0.24072910119421748,
      "grad_norm": 14.589320182800293,
      "learning_rate": 0.0001528,
      "loss": 2.7752,
      "step": 383
    },
    {
      "epoch": 0.24135763670647392,
|
"grad_norm": 19.386524200439453, |
|
"learning_rate": 0.0001532, |
|
"loss": 2.6769, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.24198617221873037, |
|
"grad_norm": 11.126659393310547, |
|
"learning_rate": 0.00015360000000000002, |
|
"loss": 2.6094, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2426147077309868, |
|
"grad_norm": 12.37399673461914, |
|
"learning_rate": 0.000154, |
|
"loss": 2.4149, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.24324324324324326, |
|
"grad_norm": 9.323434829711914, |
|
"learning_rate": 0.0001544, |
|
"loss": 2.6124, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.24387177875549967, |
|
"grad_norm": 15.187394142150879, |
|
"learning_rate": 0.00015480000000000002, |
|
"loss": 2.2974, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.24450031426775612, |
|
"grad_norm": 14.098670959472656, |
|
"learning_rate": 0.0001552, |
|
"loss": 2.5228, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.24512884978001256, |
|
"grad_norm": 22.275894165039062, |
|
"learning_rate": 0.00015560000000000001, |
|
"loss": 2.5581, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.245757385292269, |
|
"grad_norm": 23.715696334838867, |
|
"learning_rate": 0.00015600000000000002, |
|
"loss": 2.5637, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.24638592080452545, |
|
"grad_norm": 12.024730682373047, |
|
"learning_rate": 0.0001564, |
|
"loss": 2.6799, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.2470144563167819, |
|
"grad_norm": 128.14434814453125, |
|
"learning_rate": 0.00015680000000000002, |
|
"loss": 4.7508, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.24764299182903834, |
|
"grad_norm": 9.767908096313477, |
|
"learning_rate": 0.00015720000000000003, |
|
"loss": 2.8455, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.2482715273412948, |
|
"grad_norm": 14.82866382598877, |
|
"learning_rate": 0.0001576, |
|
"loss": 2.7026, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.24890006285355123, |
|
"grad_norm": 17.91452407836914, |
|
"learning_rate": 0.00015800000000000002, |
|
"loss": 2.3927, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.24952859836580768, |
|
"grad_norm": 9.49526596069336, |
|
"learning_rate": 0.00015840000000000003, |
|
"loss": 2.3653, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.2501571338780641, |
|
"grad_norm": 11.44519329071045, |
|
"learning_rate": 0.0001588, |
|
"loss": 2.7203, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.25078566939032054, |
|
"grad_norm": 11.376216888427734, |
|
"learning_rate": 0.00015920000000000002, |
|
"loss": 2.5542, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.251414204902577, |
|
"grad_norm": 18.18035888671875, |
|
"learning_rate": 0.0001596, |
|
"loss": 2.6502, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.25204274041483343, |
|
"grad_norm": 12.473836898803711, |
|
"learning_rate": 0.00016, |
|
"loss": 2.1878, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.2526712759270899, |
|
"grad_norm": 13.397380828857422, |
|
"learning_rate": 0.00016040000000000002, |
|
"loss": 2.384, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.2532998114393463, |
|
"grad_norm": 14.128639221191406, |
|
"learning_rate": 0.0001608, |
|
"loss": 2.4143, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.25392834695160277, |
|
"grad_norm": 14.258450508117676, |
|
"learning_rate": 0.00016120000000000002, |
|
"loss": 2.4701, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.2545568824638592, |
|
"grad_norm": 13.462814331054688, |
|
"learning_rate": 0.00016160000000000002, |
|
"loss": 2.3382, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.25518541797611566, |
|
"grad_norm": 9.772506713867188, |
|
"learning_rate": 0.000162, |
|
"loss": 2.5515, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.2558139534883721, |
|
"grad_norm": 15.334366798400879, |
|
"learning_rate": 0.00016240000000000002, |
|
"loss": 2.4442, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.25644248900062855, |
|
"grad_norm": 8.635390281677246, |
|
"learning_rate": 0.0001628, |
|
"loss": 2.3196, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.257071024512885, |
|
"grad_norm": 10.05532455444336, |
|
"learning_rate": 0.0001632, |
|
"loss": 2.3188, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.25769956002514144, |
|
"grad_norm": 9.961398124694824, |
|
"learning_rate": 0.0001636, |
|
"loss": 2.6808, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.2583280955373979, |
|
"grad_norm": 12.767550468444824, |
|
"learning_rate": 0.000164, |
|
"loss": 2.3732, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.25895663104965433, |
|
"grad_norm": 9.951260566711426, |
|
"learning_rate": 0.0001644, |
|
"loss": 2.4287, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.2595851665619108, |
|
"grad_norm": 11.40839958190918, |
|
"learning_rate": 0.0001648, |
|
"loss": 2.1428, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.26021370207416716, |
|
"grad_norm": 13.8335542678833, |
|
"learning_rate": 0.0001652, |
|
"loss": 2.4355, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.2608422375864236, |
|
"grad_norm": 14.79465389251709, |
|
"learning_rate": 0.0001656, |
|
"loss": 2.6137, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.26147077309868005, |
|
"grad_norm": 12.087128639221191, |
|
"learning_rate": 0.000166, |
|
"loss": 2.5661, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.2620993086109365, |
|
"grad_norm": 66.9678726196289, |
|
"learning_rate": 0.0001664, |
|
"loss": 2.905, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.26272784412319294, |
|
"grad_norm": 36.95875549316406, |
|
"learning_rate": 0.0001668, |
|
"loss": 2.5886, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.2633563796354494, |
|
"grad_norm": 16.434181213378906, |
|
"learning_rate": 0.0001672, |
|
"loss": 2.4761, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.26398491514770583, |
|
"grad_norm": 17.993885040283203, |
|
"learning_rate": 0.0001676, |
|
"loss": 2.5257, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.2646134506599623, |
|
"grad_norm": 15.398675918579102, |
|
"learning_rate": 0.000168, |
|
"loss": 2.3469, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.2652419861722187, |
|
"grad_norm": 15.613100051879883, |
|
"learning_rate": 0.0001684, |
|
"loss": 2.614, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.26587052168447517, |
|
"grad_norm": 14.203115463256836, |
|
"learning_rate": 0.0001688, |
|
"loss": 2.5969, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.2664990571967316, |
|
"grad_norm": 18.7460880279541, |
|
"learning_rate": 0.0001692, |
|
"loss": 2.6752, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.26712759270898806, |
|
"grad_norm": 15.008576393127441, |
|
"learning_rate": 0.0001696, |
|
"loss": 2.3482, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2677561282212445, |
|
"grad_norm": 12.07258415222168, |
|
"learning_rate": 0.00017, |
|
"loss": 2.0798, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.26838466373350095, |
|
"grad_norm": 25.918943405151367, |
|
"learning_rate": 0.0001704, |
|
"loss": 2.4682, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.2690131992457574, |
|
"grad_norm": 6.374201774597168, |
|
"learning_rate": 0.0001708, |
|
"loss": 2.3433, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.26964173475801384, |
|
"grad_norm": 13.480561256408691, |
|
"learning_rate": 0.00017120000000000001, |
|
"loss": 2.1633, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.2702702702702703, |
|
"grad_norm": 19.05206871032715, |
|
"learning_rate": 0.0001716, |
|
"loss": 2.5378, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.27089880578252673, |
|
"grad_norm": 28.43244743347168, |
|
"learning_rate": 0.000172, |
|
"loss": 2.4796, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.2715273412947832, |
|
"grad_norm": 19.896743774414062, |
|
"learning_rate": 0.00017240000000000002, |
|
"loss": 2.169, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.2721558768070396, |
|
"grad_norm": 31.176748275756836, |
|
"learning_rate": 0.0001728, |
|
"loss": 2.1738, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.27278441231929607, |
|
"grad_norm": 5.707266330718994, |
|
"learning_rate": 0.0001732, |
|
"loss": 2.1389, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.27341294783155246, |
|
"grad_norm": 19.774263381958008, |
|
"learning_rate": 0.00017360000000000002, |
|
"loss": 2.6666, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.2740414833438089, |
|
"grad_norm": 19.493816375732422, |
|
"learning_rate": 0.000174, |
|
"loss": 2.2989, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.27467001885606535, |
|
"grad_norm": 16.929563522338867, |
|
"learning_rate": 0.0001744, |
|
"loss": 2.3341, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.2752985543683218, |
|
"grad_norm": 9.590645790100098, |
|
"learning_rate": 0.00017480000000000002, |
|
"loss": 2.5236, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.27592708988057824, |
|
"grad_norm": 7.97600793838501, |
|
"learning_rate": 0.0001752, |
|
"loss": 2.2307, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.2765556253928347, |
|
"grad_norm": 7.129279613494873, |
|
"learning_rate": 0.0001756, |
|
"loss": 2.4411, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.2771841609050911, |
|
"grad_norm": 9.409801483154297, |
|
"learning_rate": 0.00017600000000000002, |
|
"loss": 2.3803, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.27781269641734757, |
|
"grad_norm": 6.196171283721924, |
|
"learning_rate": 0.0001764, |
|
"loss": 2.5736, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.278441231929604, |
|
"grad_norm": 9.682530403137207, |
|
"learning_rate": 0.00017680000000000001, |
|
"loss": 2.1353, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.27906976744186046, |
|
"grad_norm": 20.320688247680664, |
|
"learning_rate": 0.0001772, |
|
"loss": 2.2107, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.2796983029541169, |
|
"grad_norm": 20.578222274780273, |
|
"learning_rate": 0.0001776, |
|
"loss": 2.0656, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.28032683846637335, |
|
"grad_norm": 33.76546859741211, |
|
"learning_rate": 0.00017800000000000002, |
|
"loss": 2.2221, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.2809553739786298, |
|
"grad_norm": 9.561470985412598, |
|
"learning_rate": 0.0001784, |
|
"loss": 2.5248, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.28158390949088624, |
|
"grad_norm": 15.284167289733887, |
|
"learning_rate": 0.0001788, |
|
"loss": 2.1954, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.2822124450031427, |
|
"grad_norm": 27.233264923095703, |
|
"learning_rate": 0.00017920000000000002, |
|
"loss": 2.1467, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.28284098051539913, |
|
"grad_norm": 8.692459106445312, |
|
"learning_rate": 0.0001796, |
|
"loss": 2.4224, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2834695160276556, |
|
"grad_norm": 23.0391845703125, |
|
"learning_rate": 0.00018, |
|
"loss": 2.3062, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.284098051539912, |
|
"grad_norm": 33.97864532470703, |
|
"learning_rate": 0.00018040000000000002, |
|
"loss": 2.5164, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.28472658705216847, |
|
"grad_norm": 7.73515510559082, |
|
"learning_rate": 0.0001808, |
|
"loss": 2.3682, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.2853551225644249, |
|
"grad_norm": 10.368907928466797, |
|
"learning_rate": 0.0001812, |
|
"loss": 2.1135, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.28598365807668136, |
|
"grad_norm": 9.352826118469238, |
|
"learning_rate": 0.00018160000000000002, |
|
"loss": 2.1376, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2866121935889378, |
|
"grad_norm": 226.2757110595703, |
|
"learning_rate": 0.000182, |
|
"loss": 4.9444, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.2872407291011942, |
|
"grad_norm": 14.291020393371582, |
|
"learning_rate": 0.00018240000000000002, |
|
"loss": 2.2792, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.28786926461345064, |
|
"grad_norm": 14.201563835144043, |
|
"learning_rate": 0.00018280000000000003, |
|
"loss": 1.9881, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.2884978001257071, |
|
"grad_norm": 6.268864154815674, |
|
"learning_rate": 0.0001832, |
|
"loss": 2.2419, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.2891263356379635, |
|
"grad_norm": 7.990386486053467, |
|
"learning_rate": 0.00018360000000000002, |
|
"loss": 2.1261, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.28975487115022, |
|
"grad_norm": 12.354260444641113, |
|
"learning_rate": 0.00018400000000000003, |
|
"loss": 1.8171, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.2903834066624764, |
|
"grad_norm": 8.037335395812988, |
|
"learning_rate": 0.0001844, |
|
"loss": 2.1203, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.29101194217473286, |
|
"grad_norm": 12.988319396972656, |
|
"learning_rate": 0.00018480000000000002, |
|
"loss": 2.0204, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.2916404776869893, |
|
"grad_norm": 21.61684799194336, |
|
"learning_rate": 0.00018520000000000003, |
|
"loss": 2.3283, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.29226901319924575, |
|
"grad_norm": 33.48648452758789, |
|
"learning_rate": 0.0001856, |
|
"loss": 2.4362, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.2928975487115022, |
|
"grad_norm": 24.978181838989258, |
|
"learning_rate": 0.00018600000000000002, |
|
"loss": 1.9866, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.29352608422375864, |
|
"grad_norm": 12.113028526306152, |
|
"learning_rate": 0.00018640000000000003, |
|
"loss": 2.1763, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.2941546197360151, |
|
"grad_norm": 15.348548889160156, |
|
"learning_rate": 0.00018680000000000001, |
|
"loss": 2.4879, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.29478315524827153, |
|
"grad_norm": 7.841541767120361, |
|
"learning_rate": 0.00018720000000000002, |
|
"loss": 2.469, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.295411690760528, |
|
"grad_norm": 9.705923080444336, |
|
"learning_rate": 0.0001876, |
|
"loss": 2.3946, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.2960402262727844, |
|
"grad_norm": 13.874395370483398, |
|
"learning_rate": 0.000188, |
|
"loss": 1.9966, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.29666876178504087, |
|
"grad_norm": 14.510929107666016, |
|
"learning_rate": 0.0001884, |
|
"loss": 1.9661, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.2972972972972973, |
|
"grad_norm": 9.351888656616211, |
|
"learning_rate": 0.0001888, |
|
"loss": 2.2073, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.29792583280955376, |
|
"grad_norm": 9.50893783569336, |
|
"learning_rate": 0.0001892, |
|
"loss": 2.2, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.2985543683218102, |
|
"grad_norm": 6.685521602630615, |
|
"learning_rate": 0.0001896, |
|
"loss": 2.3768, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.29918290383406665, |
|
"grad_norm": 43.20718002319336, |
|
"learning_rate": 0.00019, |
|
"loss": 2.1069, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.2998114393463231, |
|
"grad_norm": 13.695243835449219, |
|
"learning_rate": 0.0001904, |
|
"loss": 2.1093, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.3004399748585795, |
|
"grad_norm": 10.47667121887207, |
|
"learning_rate": 0.0001908, |
|
"loss": 2.0928, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.30106851037083593, |
|
"grad_norm": 28.000720977783203, |
|
"learning_rate": 0.0001912, |
|
"loss": 1.949, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.3016970458830924, |
|
"grad_norm": 6.865454196929932, |
|
"learning_rate": 0.0001916, |
|
"loss": 1.809, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.3016970458830924, |
|
"eval_loss": 1.3915096521377563, |
|
"eval_runtime": 1572.1918, |
|
"eval_samples_per_second": 1.64, |
|
"eval_steps_per_second": 1.64, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.3023255813953488, |
|
"grad_norm": 5.930575847625732, |
|
"learning_rate": 0.000192, |
|
"loss": 2.1386, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.30295411690760526, |
|
"grad_norm": 7.512749671936035, |
|
"learning_rate": 0.00019240000000000001, |
|
"loss": 2.3738, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.3035826524198617, |
|
"grad_norm": 10.778457641601562, |
|
"learning_rate": 0.0001928, |
|
"loss": 1.9493, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.30421118793211815, |
|
"grad_norm": 12.165234565734863, |
|
"learning_rate": 0.0001932, |
|
"loss": 2.1425, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.3048397234443746, |
|
"grad_norm": 6.66940450668335, |
|
"learning_rate": 0.00019360000000000002, |
|
"loss": 2.2216, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.30546825895663104, |
|
"grad_norm": 10.975049018859863, |
|
"learning_rate": 0.000194, |
|
"loss": 2.3318, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.3060967944688875, |
|
"grad_norm": 7.881476402282715, |
|
"learning_rate": 0.0001944, |
|
"loss": 2.3984, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.30672532998114393, |
|
"grad_norm": 7.184864044189453, |
|
"learning_rate": 0.0001948, |
|
"loss": 1.9152, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.3073538654934004, |
|
"grad_norm": 8.655898094177246, |
|
"learning_rate": 0.0001952, |
|
"loss": 2.0512, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.3079824010056568, |
|
"grad_norm": 9.263758659362793, |
|
"learning_rate": 0.0001956, |
|
"loss": 2.0096, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.30861093651791327, |
|
"grad_norm": 5.204033374786377, |
|
"learning_rate": 0.000196, |
|
"loss": 2.3024, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.3092394720301697, |
|
"grad_norm": 10.690205574035645, |
|
"learning_rate": 0.0001964, |
|
"loss": 2.2217, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.30986800754242616, |
|
"grad_norm": 9.402382850646973, |
|
"learning_rate": 0.0001968, |
|
"loss": 2.0444, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.3104965430546826, |
|
"grad_norm": 28.077444076538086, |
|
"learning_rate": 0.0001972, |
|
"loss": 2.3741, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.31112507856693905, |
|
"grad_norm": 13.81711483001709, |
|
"learning_rate": 0.0001976, |
|
"loss": 1.6765, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.3117536140791955, |
|
"grad_norm": 10.126565933227539, |
|
"learning_rate": 0.00019800000000000002, |
|
"loss": 2.1858, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.31238214959145194, |
|
"grad_norm": 9.726242065429688, |
|
"learning_rate": 0.0001984, |
|
"loss": 1.9308, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.3130106851037084, |
|
"grad_norm": 5.540197372436523, |
|
"learning_rate": 0.0001988, |
|
"loss": 1.9632, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.3136392206159648, |
|
"grad_norm": 11.275084495544434, |
|
"learning_rate": 0.00019920000000000002, |
|
"loss": 1.8719, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.3142677561282212, |
|
"grad_norm": 8.379544258117676, |
|
"learning_rate": 0.0001996, |
|
"loss": 2.1343, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.31489629164047767, |
|
"grad_norm": 7.902101039886475, |
|
"learning_rate": 0.0002, |
|
"loss": 2.0374, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.3155248271527341, |
|
"grad_norm": 5.915253162384033, |
|
"learning_rate": 0.00019999962686803274, |
|
"loss": 1.9273, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.31615336266499056, |
|
"grad_norm": 4.51748514175415, |
|
"learning_rate": 0.0001999985074752248, |
|
"loss": 1.7414, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.316781898177247, |
|
"grad_norm": 82.88562774658203, |
|
"learning_rate": 0.000199996641830858, |
|
"loss": 2.4431, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.31741043368950345, |
|
"grad_norm": 16.860361099243164, |
|
"learning_rate": 0.00019999402995040202, |
|
"loss": 2.031, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.3180389692017599, |
|
"grad_norm": 10.03348159790039, |
|
"learning_rate": 0.000199990671855514, |
|
"loss": 2.1866, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.31866750471401634, |
|
"grad_norm": 7.5439629554748535, |
|
"learning_rate": 0.00019998656757403872, |
|
"loss": 1.9684, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.3192960402262728, |
|
"grad_norm": 8.82386302947998, |
|
"learning_rate": 0.00019998171714000814, |
|
"loss": 2.1364, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.3199245757385292, |
|
"grad_norm": 8.354187965393066, |
|
"learning_rate": 0.00019997612059364118, |
|
"loss": 1.6715, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.32055311125078567, |
|
"grad_norm": 6.262081146240234, |
|
"learning_rate": 0.0001999697779813434, |
|
"loss": 2.1308, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.3211816467630421, |
|
"grad_norm": 12.487313270568848, |
|
"learning_rate": 0.0001999626893557066, |
|
"loss": 1.8659, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.32181018227529856, |
|
"grad_norm": 13.1874418258667, |
|
"learning_rate": 0.00019995485477550848, |
|
"loss": 1.9808, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.322438717787555, |
|
"grad_norm": 6.41532039642334, |
|
"learning_rate": 0.00019994627430571188, |
|
"loss": 2.1896, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.32306725329981145, |
|
"grad_norm": 8.269707679748535, |
|
"learning_rate": 0.00019993694801746456, |
|
"loss": 1.9233, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.3236957888120679, |
|
"grad_norm": 7.2478556632995605, |
|
"learning_rate": 0.00019992687598809847, |
|
"loss": 2.0612, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.32432432432432434, |
|
"grad_norm": 3.833040952682495, |
|
"learning_rate": 0.00019991605830112902, |
|
"loss": 2.1114, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.3249528598365808, |
|
"grad_norm": 38.45088577270508, |
|
"learning_rate": 0.00019990449504625458, |
|
"loss": 2.0145, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.32558139534883723, |
|
"grad_norm": 6.172846794128418, |
|
"learning_rate": 0.00019989218631935555, |
|
"loss": 2.0293, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.3262099308610937, |
|
"grad_norm": 12.175736427307129, |
|
"learning_rate": 0.00019987913222249378, |
|
"loss": 1.8907, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.32683846637335007, |
|
"grad_norm": 17.118629455566406, |
|
"learning_rate": 0.00019986533286391145, |
|
"loss": 2.0689, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.3274670018856065, |
|
"grad_norm": 10.824390411376953, |
|
"learning_rate": 0.0001998507883580304, |
|
"loss": 1.8524, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.32809553739786296, |
|
"grad_norm": 11.699383735656738, |
|
"learning_rate": 0.00019983549882545104, |
|
"loss": 1.9415, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.3287240729101194, |
|
"grad_norm": 6.905187606811523, |
|
"learning_rate": 0.00019981946439295148, |
|
"loss": 1.9727, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.32935260842237585, |
|
"grad_norm": 6.928370952606201, |
|
"learning_rate": 0.00019980268519348638, |
|
"loss": 1.6597, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.3299811439346323, |
|
"grad_norm": 11.314483642578125, |
|
"learning_rate": 0.00019978516136618586, |
|
"loss": 1.8615, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.33060967944688874, |
|
"grad_norm": 7.91243839263916, |
|
"learning_rate": 0.0001997668930563543, |
|
"loss": 1.9287, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.3312382149591452, |
|
"grad_norm": 13.120015144348145, |
|
"learning_rate": 0.00019974788041546938, |
|
"loss": 1.8362, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.33186675047140163, |
|
"grad_norm": 5.466783046722412, |
|
"learning_rate": 0.00019972812360118043, |
|
"loss": 2.0244, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.3324952859836581, |
|
"grad_norm": 4.511836528778076, |
|
"learning_rate": 0.0001997076227773075, |
|
"loss": 1.9735, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.3331238214959145, |
|
"grad_norm": 54.1873779296875, |
|
"learning_rate": 0.0001996863781138397, |
|
"loss": 1.8809, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.33375235700817096, |
|
"grad_norm": 39.161216735839844, |
|
"learning_rate": 0.00019966438978693402, |
|
"loss": 2.0392, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.3343808925204274, |
|
"grad_norm": 64.01873016357422, |
|
"learning_rate": 0.00019964165797891373, |
|
"loss": 1.7525, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.33500942803268385, |
|
"grad_norm": 5.953110694885254, |
|
"learning_rate": 0.00019961818287826693, |
|
"loss": 1.9637, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.3356379635449403, |
|
"grad_norm": 5.852209091186523, |
|
"learning_rate": 0.00019959396467964497, |
|
"loss": 1.9106, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.33626649905719674, |
|
"grad_norm": 7.1264328956604, |
|
"learning_rate": 0.00019956900358386083, |
|
"loss": 2.0108, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.3368950345694532, |
|
"grad_norm": 12.140629768371582, |
|
"learning_rate": 0.0001995432997978874, |
|
"loss": 2.0983, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.33752357008170963, |
|
"grad_norm": 14.71324634552002, |
|
"learning_rate": 0.00019951685353485603, |
|
"loss": 2.0762, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.3381521055939661, |
|
"grad_norm": 14.816312789916992, |
|
"learning_rate": 0.00019948966501405425, |
|
"loss": 2.0775, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.3387806411062225, |
|
"grad_norm": 6.533554553985596, |
|
"learning_rate": 0.0001994617344609245, |
|
"loss": 2.2237, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.33940917661847897, |
|
"grad_norm": 9.91523265838623, |
|
"learning_rate": 0.00019943306210706198, |
|
"loss": 1.7658, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.34003771213073536, |
|
"grad_norm": 5.4957780838012695, |
|
"learning_rate": 0.00019940364819021265, |
|
"loss": 1.699, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.3406662476429918, |
|
"grad_norm": 4.798709869384766, |
|
"learning_rate": 0.0001993734929542715, |
|
"loss": 2.1305, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.34129478315524825, |
|
"grad_norm": 10.779973983764648, |
|
"learning_rate": 0.00019934259664928036, |
|
"loss": 1.697, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.3419233186675047, |
|
"grad_norm": 82.30536651611328, |
|
"learning_rate": 0.00019931095953142585, |
|
"loss": 3.1804, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.34255185417976114, |
|
"grad_norm": 6.669949531555176, |
|
"learning_rate": 0.00019927858186303735, |
|
"loss": 2.0105, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.3431803896920176, |
|
"grad_norm": 6.1852803230285645, |
|
"learning_rate": 0.00019924546391258464, |
|
"loss": 1.6387, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.34380892520427403, |
|
"grad_norm": 6.42205810546875, |
|
"learning_rate": 0.00019921160595467595, |
|
"loss": 2.0396, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.3444374607165305, |
|
"grad_norm": 4.6135406494140625, |
|
"learning_rate": 0.00019917700827005536, |
|
"loss": 2.1966, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.3450659962287869, |
|
"grad_norm": 3.7657713890075684, |
|
"learning_rate": 0.00019914167114560064, |
|
"loss": 1.7663, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.34569453174104336, |
|
"grad_norm": 4.691615104675293, |
|
"learning_rate": 0.00019910559487432104, |
|
"loss": 1.7651, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.3463230672532998, |
|
"grad_norm": 3.708641290664673, |
|
"learning_rate": 0.00019906877975535447, |
|
"loss": 1.9406, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.34695160276555626, |
|
"grad_norm": 30.91510772705078, |
|
"learning_rate": 0.0001990312260939654, |
|
"loss": 2.4156, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.3475801382778127, |
|
"grad_norm": 4.338770389556885, |
|
"learning_rate": 0.00019899293420154202, |
|
"loss": 1.726, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.34820867379006915, |
|
"grad_norm": 4.463732719421387, |
|
"learning_rate": 0.00019895390439559387, |
|
"loss": 2.1101, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.3488372093023256, |
|
"grad_norm": 3.511244297027588, |
|
"learning_rate": 0.00019891413699974918, |
|
"loss": 1.7408, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.34946574481458204, |
|
"grad_norm": 6.33323860168457, |
|
"learning_rate": 0.00019887363234375206, |
|
"loss": 2.2318, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.3500942803268385, |
|
"grad_norm": 7.024548053741455, |
|
"learning_rate": 0.00019883239076345992, |
|
"loss": 1.7199, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.3507228158390949, |
|
"grad_norm": 4.091303825378418, |
|
"learning_rate": 0.00019879041260084055, |
|
"loss": 2.2822, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.35135135135135137, |
|
"grad_norm": 4.067759037017822, |
|
"learning_rate": 0.0001987476982039694, |
|
"loss": 1.9152, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.3519798868636078, |
|
"grad_norm": 21.216266632080078, |
|
"learning_rate": 0.0001987042479270266, |
|
"loss": 1.631, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.35260842237586426, |
|
"grad_norm": 9.859007835388184, |
|
"learning_rate": 0.00019866006213029413, |
|
"loss": 2.1393, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.3532369578881207, |
|
"grad_norm": 6.041215896606445, |
|
"learning_rate": 0.0001986151411801527, |
|
"loss": 1.8395, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.3538654934003771, |
|
"grad_norm": 7.770387172698975, |
|
"learning_rate": 0.0001985694854490788, |
|
"loss": 2.0751, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.35449402891263354, |
|
"grad_norm": 3.955648422241211, |
|
"learning_rate": 0.00019852309531564162, |
|
"loss": 2.0338, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.35512256442489, |
|
"grad_norm": 64.14674377441406, |
|
"learning_rate": 0.00019847597116449988, |
|
"loss": 2.3877, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.35575109993714643, |
|
"grad_norm": 8.95103645324707, |
|
"learning_rate": 0.0001984281133863986, |
|
"loss": 1.9109, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.3563796354494029, |
|
"grad_norm": 17.331172943115234, |
|
"learning_rate": 0.000198379522378166, |
|
"loss": 1.9694, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.3570081709616593, |
|
"grad_norm": 10.267071723937988, |
|
"learning_rate": 0.00019833019854270992, |
|
"loss": 2.0442, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.35763670647391577, |
|
"grad_norm": 12.023430824279785, |
|
"learning_rate": 0.0001982801422890149, |
|
"loss": 1.697, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.3582652419861722, |
|
"grad_norm": 3.4745445251464844, |
|
"learning_rate": 0.00019822935403213845, |
|
"loss": 1.8609, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.35889377749842866, |
|
"grad_norm": 14.138343811035156, |
|
"learning_rate": 0.00019817783419320768, |
|
"loss": 1.8126, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.3595223130106851, |
|
"grad_norm": 4.1279449462890625, |
|
"learning_rate": 0.00019812558319941596, |
|
"loss": 1.7317, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.36015084852294155, |
|
"grad_norm": 3.9519424438476562, |
|
"learning_rate": 0.00019807260148401915, |
|
"loss": 1.8072, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.360779384035198, |
|
"grad_norm": 5.094558238983154, |
|
"learning_rate": 0.00019801888948633225, |
|
"loss": 1.9189, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.36140791954745444, |
|
"grad_norm": 5.643648624420166, |
|
"learning_rate": 0.00019796444765172547, |
|
"loss": 1.6462, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3620364550597109, |
|
"grad_norm": 5.166630268096924, |
|
"learning_rate": 0.0001979092764316208, |
|
"loss": 1.9548, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.3626649905719673, |
|
"grad_norm": 5.57295036315918, |
|
"learning_rate": 0.0001978533762834882, |
|
"loss": 2.1468, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.3632935260842238, |
|
"grad_norm": 6.271886348724365, |
|
"learning_rate": 0.00019779674767084172, |
|
"loss": 2.1451, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.3639220615964802, |
|
"grad_norm": 6.9632887840271, |
|
"learning_rate": 0.00019773939106323564, |
|
"loss": 2.0131, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.36455059710873666, |
|
"grad_norm": 5.3868303298950195, |
|
"learning_rate": 0.00019768130693626076, |
|
"loss": 2.0985, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3651791326209931, |
|
"grad_norm": 14.987902641296387, |
|
"learning_rate": 0.00019762249577154028, |
|
"loss": 1.8193, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.36580766813324955, |
|
"grad_norm": 3.5422184467315674, |
|
"learning_rate": 0.0001975629580567259, |
|
"loss": 1.8757, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.366436203645506, |
|
"grad_norm": 3.7627673149108887, |
|
"learning_rate": 0.0001975026942854937, |
|
"loss": 2.0999, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.3670647391577624, |
|
"grad_norm": 12.89395523071289, |
|
"learning_rate": 0.0001974417049575401, |
|
"loss": 1.6594, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.36769327467001883, |
|
"grad_norm": 4.728719711303711, |
|
"learning_rate": 0.00019737999057857774, |
|
"loss": 1.8786, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3683218101822753, |
|
"grad_norm": 9.114471435546875, |
|
"learning_rate": 0.00019731755166033126, |
|
"loss": 1.9816, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.3689503456945317, |
|
"grad_norm": 16.096595764160156, |
|
"learning_rate": 0.00019725438872053295, |
|
"loss": 2.1673, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.36957888120678817, |
|
"grad_norm": 2.9777557849884033, |
|
"learning_rate": 0.00019719050228291876, |
|
"loss": 1.9243, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.3702074167190446, |
|
"grad_norm": 5.046726703643799, |
|
"learning_rate": 0.0001971258928772235, |
|
"loss": 1.9836, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.37083595223130106, |
|
"grad_norm": 4.913296222686768, |
|
"learning_rate": 0.00019706056103917688, |
|
"loss": 2.0576, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.3714644877435575, |
|
"grad_norm": 4.675854682922363, |
|
"learning_rate": 0.00019699450731049882, |
|
"loss": 2.1639, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.37209302325581395, |
|
"grad_norm": 5.70509672164917, |
|
"learning_rate": 0.0001969277322388951, |
|
"loss": 1.7714, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.3727215587680704, |
|
"grad_norm": 6.298268795013428, |
|
"learning_rate": 0.00019686023637805264, |
|
"loss": 2.0385, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.37335009428032684, |
|
"grad_norm": 3.511186361312866, |
|
"learning_rate": 0.0001967920202876351, |
|
"loss": 1.6813, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.3739786297925833, |
|
"grad_norm": 3.8464162349700928, |
|
"learning_rate": 0.0001967230845332781, |
|
"loss": 1.4338, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.37460716530483973, |
|
"grad_norm": 4.702010631561279, |
|
"learning_rate": 0.00019665342968658463, |
|
"loss": 1.9824, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.3752357008170962, |
|
"grad_norm": 24.309362411499023, |
|
"learning_rate": 0.00019658305632512023, |
|
"loss": 1.7995, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.3758642363293526, |
|
"grad_norm": 8.361018180847168, |
|
"learning_rate": 0.00019651196503240822, |
|
"loss": 1.8303, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.37649277184160906, |
|
"grad_norm": 13.187949180603027, |
|
"learning_rate": 0.000196440156397925, |
|
"loss": 1.5381, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.3771213073538655, |
|
"grad_norm": 3.4687323570251465, |
|
"learning_rate": 0.00019636763101709488, |
|
"loss": 1.6627, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.37774984286612195, |
|
"grad_norm": 3.7859907150268555, |
|
"learning_rate": 0.00019629438949128538, |
|
"loss": 1.7985, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.3783783783783784, |
|
"grad_norm": 4.294689655303955, |
|
"learning_rate": 0.00019622043242780219, |
|
"loss": 1.7507, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.37900691389063484, |
|
"grad_norm": 5.246135234832764, |
|
"learning_rate": 0.00019614576043988405, |
|
"loss": 1.9783, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.3796354494028913, |
|
"grad_norm": 6.5482258796691895, |
|
"learning_rate": 0.00019607037414669774, |
|
"loss": 1.6939, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.3802639849151477, |
|
"grad_norm": 7.9593963623046875, |
|
"learning_rate": 0.00019599427417333297, |
|
"loss": 1.7155, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.3808925204274041, |
|
"grad_norm": 4.534102916717529, |
|
"learning_rate": 0.0001959174611507971, |
|
"loss": 1.6135, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.38152105593966057, |
|
"grad_norm": 3.6695873737335205, |
|
"learning_rate": 0.00019583993571601008, |
|
"loss": 2.0391, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.382149591451917, |
|
"grad_norm": 26.93898582458496, |
|
"learning_rate": 0.0001957616985117989, |
|
"loss": 1.8184, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.38277812696417346, |
|
"grad_norm": 6.302891254425049, |
|
"learning_rate": 0.00019568275018689256, |
|
"loss": 2.2152, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.3834066624764299, |
|
"grad_norm": 4.594456195831299, |
|
"learning_rate": 0.00019560309139591642, |
|
"loss": 1.9796, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.38403519798868635, |
|
"grad_norm": 5.86282205581665, |
|
"learning_rate": 0.00019552272279938694, |
|
"loss": 1.8413, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.3846637335009428, |
|
"grad_norm": 4.744015216827393, |
|
"learning_rate": 0.00019544164506370625, |
|
"loss": 1.7081, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.38529226901319924, |
|
"grad_norm": 7.284010887145996, |
|
"learning_rate": 0.00019535985886115639, |
|
"loss": 1.825, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.3859208045254557, |
|
"grad_norm": 3.884884834289551, |
|
"learning_rate": 0.00019527736486989398, |
|
"loss": 1.6148, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.38654934003771213, |
|
"grad_norm": 5.0570068359375, |
|
"learning_rate": 0.0001951941637739445, |
|
"loss": 1.7481, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.3871778755499686, |
|
"grad_norm": 3.745852470397949, |
|
"learning_rate": 0.00019511025626319656, |
|
"loss": 2.0021, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.387806411062225, |
|
"grad_norm": 7.246944904327393, |
|
"learning_rate": 0.00019502564303339624, |
|
"loss": 1.9073, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.38843494657448147, |
|
"grad_norm": 4.088373184204102, |
|
"learning_rate": 0.00019494032478614137, |
|
"loss": 1.7618, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.3890634820867379, |
|
"grad_norm": 3.925509214401245, |
|
"learning_rate": 0.0001948543022288756, |
|
"loss": 1.9142, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.38969201759899436, |
|
"grad_norm": 5.785417079925537, |
|
"learning_rate": 0.00019476757607488266, |
|
"loss": 2.0534, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.3903205531112508, |
|
"grad_norm": 4.438549518585205, |
|
"learning_rate": 0.00019468014704328027, |
|
"loss": 2.0836, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.39094908862350725, |
|
"grad_norm": 3.809293508529663, |
|
"learning_rate": 0.0001945920158590144, |
|
"loss": 1.8568, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.3915776241357637, |
|
"grad_norm": 43.43647384643555, |
|
"learning_rate": 0.00019450318325285314, |
|
"loss": 1.8248, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.39220615964802014, |
|
"grad_norm": 4.826264381408691, |
|
"learning_rate": 0.00019441364996138053, |
|
"loss": 2.0728, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.3928346951602766, |
|
"grad_norm": 5.580936908721924, |
|
"learning_rate": 0.00019432341672699066, |
|
"loss": 1.5899, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.39346323067253297, |
|
"grad_norm": 30.76082420349121, |
|
"learning_rate": 0.00019423248429788144, |
|
"loss": 1.7365, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.3940917661847894, |
|
"grad_norm": 34.04156494140625, |
|
"learning_rate": 0.00019414085342804826, |
|
"loss": 1.8151, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.39472030169704586, |
|
"grad_norm": 7.619344711303711, |
|
"learning_rate": 0.000194048524877278, |
|
"loss": 2.1841, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.3953488372093023, |
|
"grad_norm": 6.076333999633789, |
|
"learning_rate": 0.00019395549941114254, |
|
"loss": 1.3382, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.39597737272155875, |
|
"grad_norm": 4.319580554962158, |
|
"learning_rate": 0.00019386177780099237, |
|
"loss": 2.0434, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.3966059082338152, |
|
"grad_norm": 5.401275634765625, |
|
"learning_rate": 0.0001937673608239504, |
|
"loss": 2.0768, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.39723444374607164, |
|
"grad_norm": 7.192997455596924, |
|
"learning_rate": 0.00019367224926290536, |
|
"loss": 2.2093, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.3978629792583281, |
|
"grad_norm": 5.700124740600586, |
|
"learning_rate": 0.0001935764439065053, |
|
"loss": 1.8486, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.39849151477058453, |
|
"grad_norm": 6.410958766937256, |
|
"learning_rate": 0.00019347994554915114, |
|
"loss": 1.7625, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.399120050282841, |
|
"grad_norm": 4.75112247467041, |
|
"learning_rate": 0.00019338275499099005, |
|
"loss": 1.9693, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.3997485857950974, |
|
"grad_norm": 4.653494834899902, |
|
"learning_rate": 0.00019328487303790879, |
|
"loss": 1.9736, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.40037712130735387, |
|
"grad_norm": 2.9836111068725586, |
|
"learning_rate": 0.00019318630050152704, |
|
"loss": 1.7472, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.4010056568196103, |
|
"grad_norm": 2.469727039337158, |
|
"learning_rate": 0.00019308703819919065, |
|
"loss": 2.2143, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.40163419233186676, |
|
"grad_norm": 25.313440322875977, |
|
"learning_rate": 0.00019298708695396496, |
|
"loss": 2.1339, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.4022627278441232, |
|
"grad_norm": 3.8813374042510986, |
|
"learning_rate": 0.0001928864475946278, |
|
"loss": 2.088, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.4022627278441232, |
|
"eval_loss": 1.1269772052764893, |
|
"eval_runtime": 1568.8549, |
|
"eval_samples_per_second": 1.643, |
|
"eval_steps_per_second": 1.643, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.40289126335637965, |
|
"grad_norm": 9.410492897033691, |
|
"learning_rate": 0.00019278512095566278, |
|
"loss": 1.9134, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.4035197988686361, |
|
"grad_norm": 4.390370845794678, |
|
"learning_rate": 0.00019268310787725238, |
|
"loss": 1.5095, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.40414833438089254, |
|
"grad_norm": 3.2495274543762207, |
|
"learning_rate": 0.00019258040920527075, |
|
"loss": 1.5648, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.404776869893149, |
|
"grad_norm": 2.887073278427124, |
|
"learning_rate": 0.00019247702579127694, |
|
"loss": 1.9786, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.40540540540540543, |
|
"grad_norm": 3.6503512859344482, |
|
"learning_rate": 0.00019237295849250786, |
|
"loss": 1.7403, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.4060339409176619, |
|
"grad_norm": 3.6960840225219727, |
|
"learning_rate": 0.0001922682081718709, |
|
"loss": 1.8488, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.4066624764299183, |
|
"grad_norm": 47.15095520019531, |
|
"learning_rate": 0.00019216277569793702, |
|
"loss": 2.8805, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.4072910119421747, |
|
"grad_norm": 3.1784255504608154, |
|
"learning_rate": 0.00019205666194493348, |
|
"loss": 1.7315, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.40791954745443115, |
|
"grad_norm": 3.352231740951538, |
|
"learning_rate": 0.0001919498677927366, |
|
"loss": 1.7752, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.4085480829666876, |
|
"grad_norm": 3.8916022777557373, |
|
"learning_rate": 0.00019184239412686443, |
|
"loss": 1.6095, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.40917661847894404, |
|
"grad_norm": 4.626668453216553, |
|
"learning_rate": 0.00019173424183846938, |
|
"loss": 2.0136, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.4098051539912005, |
|
"grad_norm": 3.6814186573028564, |
|
"learning_rate": 0.00019162541182433094, |
|
"loss": 1.9785, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.41043368950345693, |
|
"grad_norm": 2.4360921382904053, |
|
"learning_rate": 0.0001915159049868482, |
|
"loss": 2.1039, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.4110622250157134, |
|
"grad_norm": 3.070681571960449, |
|
"learning_rate": 0.00019140572223403222, |
|
"loss": 1.8298, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.4116907605279698, |
|
"grad_norm": 3.048560619354248, |
|
"learning_rate": 0.00019129486447949884, |
|
"loss": 1.9087, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.41231929604022627, |
|
"grad_norm": 8.032454490661621, |
|
"learning_rate": 0.00019118333264246066, |
|
"loss": 1.8988, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.4129478315524827, |
|
"grad_norm": 5.433006763458252, |
|
"learning_rate": 0.00019107112764771983, |
|
"loss": 1.7312, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.41357636706473916, |
|
"grad_norm": 5.955564022064209, |
|
"learning_rate": 0.00019095825042566008, |
|
"loss": 2.0372, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.4142049025769956, |
|
"grad_norm": 16.444969177246094, |
|
"learning_rate": 0.00019084470191223926, |
|
"loss": 1.7105, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.41483343808925205, |
|
"grad_norm": 7.758225440979004, |
|
"learning_rate": 0.00019073048304898133, |
|
"loss": 1.8812, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.4154619736015085, |
|
"grad_norm": 4.389394283294678, |
|
"learning_rate": 0.00019061559478296872, |
|
"loss": 1.9399, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.41609050911376494, |
|
"grad_norm": 6.461364269256592, |
|
"learning_rate": 0.00019050003806683443, |
|
"loss": 1.557, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.4167190446260214, |
|
"grad_norm": 6.0508317947387695, |
|
"learning_rate": 0.0001903838138587541, |
|
"loss": 1.7769, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.41734758013827783, |
|
"grad_norm": 5.960983753204346, |
|
"learning_rate": 0.00019026692312243816, |
|
"loss": 1.7441, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.4179761156505343, |
|
"grad_norm": 5.332518577575684, |
|
"learning_rate": 0.0001901493668271237, |
|
"loss": 1.7507, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.4186046511627907, |
|
"grad_norm": 4.627866268157959, |
|
"learning_rate": 0.00019003114594756654, |
|
"loss": 1.588, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.41923318667504716, |
|
"grad_norm": 5.30118465423584, |
|
"learning_rate": 0.0001899122614640331, |
|
"loss": 2.1642, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.4198617221873036, |
|
"grad_norm": 3.94102144241333, |
|
"learning_rate": 0.00018979271436229232, |
|
"loss": 1.8787, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.42049025769956, |
|
"grad_norm": 3.952944040298462, |
|
"learning_rate": 0.0001896725056336074, |
|
"loss": 2.1777, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.42111879321181644, |
|
"grad_norm": 17.84577178955078, |
|
"learning_rate": 0.00018955163627472772, |
|
"loss": 1.5896, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.4217473287240729, |
|
"grad_norm": 3.3658595085144043, |
|
"learning_rate": 0.00018943010728788036, |
|
"loss": 1.7622, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.42237586423632933, |
|
"grad_norm": 3.9689407348632812, |
|
"learning_rate": 0.00018930791968076202, |
|
"loss": 1.8379, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.4230043997485858, |
|
"grad_norm": 2.2758445739746094, |
|
"learning_rate": 0.0001891850744665305, |
|
"loss": 1.6416, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.4236329352608422, |
|
"grad_norm": 25.5117130279541, |
|
"learning_rate": 0.00018906157266379637, |
|
"loss": 1.6938, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.42426147077309867, |
|
"grad_norm": 2.699244976043701, |
|
"learning_rate": 0.00018893741529661464, |
|
"loss": 1.9915, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.4248900062853551, |
|
"grad_norm": 34.14410400390625, |
|
"learning_rate": 0.00018881260339447594, |
|
"loss": 1.7325, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.42551854179761156, |
|
"grad_norm": 3.199965476989746, |
|
"learning_rate": 0.0001886871379922983, |
|
"loss": 1.9895, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.426147077309868, |
|
"grad_norm": 29.026304244995117, |
|
"learning_rate": 0.00018856102013041842, |
|
"loss": 1.5769, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.42677561282212445, |
|
"grad_norm": 2.1777989864349365, |
|
"learning_rate": 0.00018843425085458316, |
|
"loss": 1.8859, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.4274041483343809, |
|
"grad_norm": 4.491693019866943, |
|
"learning_rate": 0.0001883068312159407, |
|
"loss": 1.9437, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.42803268384663734, |
|
"grad_norm": 8.690496444702148, |
|
"learning_rate": 0.00018817876227103189, |
|
"loss": 1.7174, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.4286612193588938, |
|
"grad_norm": 3.7214949131011963, |
|
"learning_rate": 0.00018805004508178161, |
|
"loss": 2.0811, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.42928975487115023, |
|
"grad_norm": 6.077978610992432, |
|
"learning_rate": 0.00018792068071548984, |
|
"loss": 1.7471, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.4299182903834067, |
|
"grad_norm": 6.373495578765869, |
|
"learning_rate": 0.00018779067024482274, |
|
"loss": 2.0545, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.4305468258956631, |
|
"grad_norm": 5.059445381164551, |
|
"learning_rate": 0.00018766001474780397, |
|
"loss": 2.0324, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.43117536140791957, |
|
"grad_norm": 5.652797222137451, |
|
"learning_rate": 0.0001875287153078056, |
|
"loss": 1.6775, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.431803896920176, |
|
"grad_norm": 4.22088623046875, |
|
"learning_rate": 0.0001873967730135391, |
|
"loss": 1.8297, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.43243243243243246, |
|
"grad_norm": 2.631542921066284, |
|
"learning_rate": 0.0001872641889590465, |
|
"loss": 1.9138, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.4330609679446889, |
|
"grad_norm": 2.948751211166382, |
|
"learning_rate": 0.00018713096424369103, |
|
"loss": 1.6555, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.4336895034569453, |
|
"grad_norm": 2.5569674968719482, |
|
"learning_rate": 0.00018699709997214832, |
|
"loss": 1.5643, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.43431803896920174, |
|
"grad_norm": 4.394289493560791, |
|
"learning_rate": 0.00018686259725439691, |
|
"loss": 1.621, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.4349465744814582, |
|
"grad_norm": 2.499648332595825, |
|
"learning_rate": 0.00018672745720570947, |
|
"loss": 2.168, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.4355751099937146, |
|
"grad_norm": 2.7896997928619385, |
|
"learning_rate": 0.00018659168094664304, |
|
"loss": 2.0703, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.43620364550597107, |
|
"grad_norm": 6.9485650062561035, |
|
"learning_rate": 0.00018645526960303022, |
|
"loss": 1.6304, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.4368321810182275, |
|
"grad_norm": 59.711971282958984, |
|
"learning_rate": 0.0001863182243059694, |
|
"loss": 2.3173, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.43746071653048396, |
|
"grad_norm": 9.602359771728516, |
|
"learning_rate": 0.00018618054619181585, |
|
"loss": 1.7722, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.4380892520427404, |
|
"grad_norm": 3.540498971939087, |
|
"learning_rate": 0.00018604223640217185, |
|
"loss": 1.4779, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.43871778755499685, |
|
"grad_norm": 3.870150089263916, |
|
"learning_rate": 0.00018590329608387748, |
|
"loss": 1.4608, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.4393463230672533, |
|
"grad_norm": 2.603410005569458, |
|
"learning_rate": 0.0001857637263890011, |
|
"loss": 1.9159, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.43997485857950974, |
|
"grad_norm": 2.723639488220215, |
|
"learning_rate": 0.00018562352847482965, |
|
"loss": 1.4755, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.4406033940917662, |
|
"grad_norm": 5.783612251281738, |
|
"learning_rate": 0.00018548270350385922, |
|
"loss": 1.4706, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.44123192960402263, |
|
"grad_norm": 5.094363689422607, |
|
"learning_rate": 0.00018534125264378537, |
|
"loss": 1.7839, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.4418604651162791, |
|
"grad_norm": 8.562081336975098, |
|
"learning_rate": 0.00018519917706749336, |
|
"loss": 1.7617, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.4424890006285355, |
|
"grad_norm": 3.990987777709961, |
|
"learning_rate": 0.0001850564779530485, |
|
"loss": 1.6584, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.44311753614079197, |
|
"grad_norm": 4.689113616943359, |
|
"learning_rate": 0.0001849131564836864, |
|
"loss": 1.7704, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.4437460716530484, |
|
"grad_norm": 3.648507595062256, |
|
"learning_rate": 0.00018476921384780306, |
|
"loss": 1.999, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.44437460716530486, |
|
"grad_norm": 4.041159152984619, |
|
"learning_rate": 0.00018462465123894514, |
|
"loss": 1.8709, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.4450031426775613, |
|
"grad_norm": 5.241822719573975, |
|
"learning_rate": 0.0001844794698558, |
|
"loss": 2.04, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.44563167818981775, |
|
"grad_norm": 3.5962722301483154, |
|
"learning_rate": 0.00018433367090218573, |
|
"loss": 1.7618, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.4462602137020742, |
|
"grad_norm": 4.885190010070801, |
|
"learning_rate": 0.0001841872555870413, |
|
"loss": 1.7482, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.4468887492143306, |
|
"grad_norm": 11.033466339111328, |
|
"learning_rate": 0.00018404022512441627, |
|
"loss": 1.6004, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.44751728472658703, |
|
"grad_norm": 2.763528823852539, |
|
"learning_rate": 0.000183892580733461, |
|
"loss": 1.8139, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.4481458202388435, |
|
"grad_norm": 2.823786735534668, |
|
"learning_rate": 0.0001837443236384165, |
|
"loss": 1.932, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.4487743557510999, |
|
"grad_norm": 3.0920910835266113, |
|
"learning_rate": 0.0001835954550686041, |
|
"loss": 2.1842, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.44940289126335636, |
|
"grad_norm": 6.687446594238281, |
|
"learning_rate": 0.00018344597625841538, |
|
"loss": 1.4632, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.4500314267756128, |
|
"grad_norm": 3.8577492237091064, |
|
"learning_rate": 0.00018329588844730196, |
|
"loss": 1.5663, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.45065996228786925, |
|
"grad_norm": 4.575584411621094, |
|
"learning_rate": 0.0001831451928797652, |
|
"loss": 1.9887, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.4512884978001257, |
|
"grad_norm": 3.485588788986206, |
|
"learning_rate": 0.00018299389080534586, |
|
"loss": 1.7647, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.45191703331238214, |
|
"grad_norm": 2.9639642238616943, |
|
"learning_rate": 0.0001828419834786137, |
|
"loss": 1.6995, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.4525455688246386, |
|
"grad_norm": 4.451672554016113, |
|
"learning_rate": 0.00018268947215915716, |
|
"loss": 1.5361, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.45317410433689503, |
|
"grad_norm": 4.583763599395752, |
|
"learning_rate": 0.00018253635811157285, |
|
"loss": 1.1242, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.4538026398491515, |
|
"grad_norm": 4.331716537475586, |
|
"learning_rate": 0.00018238264260545518, |
|
"loss": 1.8472, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.4544311753614079, |
|
"grad_norm": 2.900270700454712, |
|
"learning_rate": 0.00018222832691538568, |
|
"loss": 1.9554, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.45505971087366437, |
|
"grad_norm": 3.316112756729126, |
|
"learning_rate": 0.00018207341232092248, |
|
"loss": 1.8085, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.4556882463859208, |
|
"grad_norm": 3.347519874572754, |
|
"learning_rate": 0.00018191790010658974, |
|
"loss": 1.7429, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.45631678189817726, |
|
"grad_norm": 3.0476086139678955, |
|
"learning_rate": 0.00018176179156186698, |
|
"loss": 1.7755, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.4569453174104337, |
|
"grad_norm": 4.309592247009277, |
|
"learning_rate": 0.00018160508798117843, |
|
"loss": 1.5496, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.45757385292269015, |
|
"grad_norm": 8.562623023986816, |
|
"learning_rate": 0.00018144779066388218, |
|
"loss": 1.789, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.4582023884349466, |
|
"grad_norm": 2.293079137802124, |
|
"learning_rate": 0.00018128990091425949, |
|
"loss": 2.1914, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.45883092394720304, |
|
"grad_norm": 8.049859046936035, |
|
"learning_rate": 0.00018113142004150398, |
|
"loss": 1.6645, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.4594594594594595, |
|
"grad_norm": 19.898277282714844, |
|
"learning_rate": 0.0001809723493597108, |
|
"loss": 1.2446, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.4600879949717159, |
|
"grad_norm": 29.26360321044922, |
|
"learning_rate": 0.00018081269018786563, |
|
"loss": 1.612, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.4607165304839723, |
|
"grad_norm": 3.1725094318389893, |
|
"learning_rate": 0.0001806524438498338, |
|
"loss": 1.9092, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.46134506599622876, |
|
"grad_norm": 2.3968775272369385, |
|
"learning_rate": 0.00018049161167434937, |
|
"loss": 2.1533, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.4619736015084852, |
|
"grad_norm": 3.0249500274658203, |
|
"learning_rate": 0.00018033019499500398, |
|
"loss": 1.7566, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.46260213702074165, |
|
"grad_norm": 3.576960325241089, |
|
"learning_rate": 0.000180168195150236, |
|
"loss": 1.444, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.4632306725329981, |
|
"grad_norm": 2.93385648727417, |
|
"learning_rate": 0.00018000561348331914, |
|
"loss": 1.8811, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.46385920804525455, |
|
"grad_norm": 2.894970178604126, |
|
"learning_rate": 0.0001798424513423516, |
|
"loss": 1.8163, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.464487743557511, |
|
"grad_norm": 3.2280712127685547, |
|
"learning_rate": 0.00017967871008024478, |
|
"loss": 1.7769, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.46511627906976744, |
|
"grad_norm": 3.0282838344573975, |
|
"learning_rate": 0.00017951439105471192, |
|
"loss": 2.0009, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.4657448145820239, |
|
"grad_norm": 3.0622615814208984, |
|
"learning_rate": 0.00017934949562825706, |
|
"loss": 1.978, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.4663733500942803, |
|
"grad_norm": 5.17506742477417, |
|
"learning_rate": 0.00017918402516816368, |
|
"loss": 1.7729, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.46700188560653677, |
|
"grad_norm": 2.4315149784088135, |
|
"learning_rate": 0.00017901798104648324, |
|
"loss": 1.6185, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.4676304211187932, |
|
"grad_norm": 6.906350612640381, |
|
"learning_rate": 0.00017885136464002404, |
|
"loss": 1.443, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.46825895663104966, |
|
"grad_norm": 8.11252212524414, |
|
"learning_rate": 0.0001786841773303395, |
|
"loss": 1.7909, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.4688874921433061, |
|
"grad_norm": 2.88930082321167, |
|
"learning_rate": 0.000178516420503717, |
|
"loss": 1.8463, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.46951602765556255, |
|
"grad_norm": 6.115964889526367, |
|
"learning_rate": 0.00017834809555116615, |
|
"loss": 2.1431, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.470144563167819, |
|
"grad_norm": 4.440715789794922, |
|
"learning_rate": 0.00017817920386840744, |
|
"loss": 2.0347, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.47077309868007544, |
|
"grad_norm": 3.351571559906006, |
|
"learning_rate": 0.0001780097468558606, |
|
"loss": 1.4647, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.4714016341923319, |
|
"grad_norm": 3.25675106048584, |
|
"learning_rate": 0.00017783972591863288, |
|
"loss": 1.4636, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.47203016970458833, |
|
"grad_norm": 4.558582305908203, |
|
"learning_rate": 0.00017766914246650749, |
|
"loss": 1.3471, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.4726587052168448, |
|
"grad_norm": 5.9593024253845215, |
|
"learning_rate": 0.00017749799791393206, |
|
"loss": 1.8219, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.4732872407291012, |
|
"grad_norm": 4.069904327392578, |
|
"learning_rate": 0.00017732629368000667, |
|
"loss": 1.9191, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.4739157762413576, |
|
"grad_norm": 4.142630577087402, |
|
"learning_rate": 0.00017715403118847208, |
|
"loss": 1.7309, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.47454431175361406, |
|
"grad_norm": 3.7842137813568115, |
|
"learning_rate": 0.0001769812118676982, |
|
"loss": 1.6221, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.4751728472658705, |
|
"grad_norm": 3.864184856414795, |
|
"learning_rate": 0.000176807837150672, |
|
"loss": 1.8064, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.47580138277812695, |
|
"grad_norm": 3.0092368125915527, |
|
"learning_rate": 0.0001766339084749856, |
|
"loss": 1.6365, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.4764299182903834, |
|
"grad_norm": 12.817537307739258, |
|
"learning_rate": 0.00017645942728282464, |
|
"loss": 1.8892, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.47705845380263984, |
|
"grad_norm": 11.806923866271973, |
|
"learning_rate": 0.00017628439502095594, |
|
"loss": 1.6168, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.4776869893148963, |
|
"grad_norm": 5.879858493804932, |
|
"learning_rate": 0.0001761088131407158, |
|
"loss": 1.8019, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.4783155248271527, |
|
"grad_norm": 3.0829169750213623, |
|
"learning_rate": 0.00017593268309799783, |
|
"loss": 1.5807, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.47894406033940917, |
|
"grad_norm": 2.097031831741333, |
|
"learning_rate": 0.00017575600635324088, |
|
"loss": 2.0684, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.4795725958516656, |
|
"grad_norm": 2.688417673110962, |
|
"learning_rate": 0.00017557878437141698, |
|
"loss": 1.8658, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.48020113136392206, |
|
"grad_norm": 3.2802345752716064, |
|
"learning_rate": 0.00017540101862201926, |
|
"loss": 1.9306, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.4808296668761785, |
|
"grad_norm": 3.1129918098449707, |
|
"learning_rate": 0.00017522271057904946, |
|
"loss": 1.6982, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.48145820238843495, |
|
"grad_norm": 2.2457778453826904, |
|
"learning_rate": 0.0001750438617210061, |
|
"loss": 1.9326, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.4820867379006914, |
|
"grad_norm": 2.9294021129608154, |
|
"learning_rate": 0.000174864473530872, |
|
"loss": 2.0253, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.48271527341294784, |
|
"grad_norm": 2.652953863143921, |
|
"learning_rate": 0.00017468454749610196, |
|
"loss": 1.3577, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.4833438089252043, |
|
"grad_norm": 3.715381622314453, |
|
"learning_rate": 0.0001745040851086106, |
|
"loss": 1.922, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.48397234443746073, |
|
"grad_norm": 3.603480100631714, |
|
"learning_rate": 0.00017432308786475978, |
|
"loss": 1.8925, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.4846008799497172, |
|
"grad_norm": 4.010918617248535, |
|
"learning_rate": 0.0001741415572653464, |
|
"loss": 1.6332, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.4852294154619736, |
|
"grad_norm": 3.399214029312134, |
|
"learning_rate": 0.00017395949481558966, |
|
"loss": 1.9582, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.48585795097423007, |
|
"grad_norm": 2.358330488204956, |
|
"learning_rate": 0.00017377690202511904, |
|
"loss": 1.8979, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.4864864864864865, |
|
"grad_norm": 2.503108263015747, |
|
"learning_rate": 0.00017359378040796123, |
|
"loss": 1.6271, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.4871150219987429, |
|
"grad_norm": 2.3593077659606934, |
|
"learning_rate": 0.00017341013148252813, |
|
"loss": 1.8728, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.48774355751099935, |
|
"grad_norm": 11.187983512878418, |
|
"learning_rate": 0.00017322595677160375, |
|
"loss": 1.768, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.4883720930232558, |
|
"grad_norm": 3.171013116836548, |
|
"learning_rate": 0.00017304125780233194, |
|
"loss": 1.7985, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.48900062853551224, |
|
"grad_norm": 3.1267857551574707, |
|
"learning_rate": 0.00017285603610620363, |
|
"loss": 1.5857, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.4896291640477687, |
|
"grad_norm": 7.174981594085693, |
|
"learning_rate": 0.00017267029321904397, |
|
"loss": 1.7335, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.49025769956002513, |
|
"grad_norm": 2.160280227661133, |
|
"learning_rate": 0.0001724840306809999, |
|
"loss": 2.0375, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4908862350722816, |
|
"grad_norm": 6.354787349700928, |
|
"learning_rate": 0.0001722972500365271, |
|
"loss": 1.5088, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.491514770584538, |
|
"grad_norm": 31.453384399414062, |
|
"learning_rate": 0.00017210995283437733, |
|
"loss": 1.6668, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.49214330609679446, |
|
"grad_norm": 45.95703887939453, |
|
"learning_rate": 0.0001719221406275856, |
|
"loss": 1.4828, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.4927718416090509, |
|
"grad_norm": 3.3195204734802246, |
|
"learning_rate": 0.00017173381497345719, |
|
"loss": 1.8632, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.49340037712130735, |
|
"grad_norm": 2.7932851314544678, |
|
"learning_rate": 0.00017154497743355479, |
|
"loss": 1.8966, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.4940289126335638, |
|
"grad_norm": 4.167323112487793, |
|
"learning_rate": 0.00017135562957368554, |
|
"loss": 1.586, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.49465744814582024, |
|
"grad_norm": 2.7749907970428467, |
|
"learning_rate": 0.00017116577296388813, |
|
"loss": 1.8016, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.4952859836580767, |
|
"grad_norm": 3.2511887550354004, |
|
"learning_rate": 0.0001709754091784197, |
|
"loss": 2.0155, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.49591451917033313, |
|
"grad_norm": 3.043982744216919, |
|
"learning_rate": 0.00017078453979574274, |
|
"loss": 1.7555, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.4965430546825896, |
|
"grad_norm": 4.874665260314941, |
|
"learning_rate": 0.00017059316639851207, |
|
"loss": 1.8308, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.497171590194846, |
|
"grad_norm": 4.281073093414307, |
|
"learning_rate": 0.00017040129057356186, |
|
"loss": 1.6109, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.49780012570710247, |
|
"grad_norm": 4.453769207000732, |
|
"learning_rate": 0.0001702089139118921, |
|
"loss": 1.5652, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.4984286612193589, |
|
"grad_norm": 2.850391149520874, |
|
"learning_rate": 0.00017001603800865576, |
|
"loss": 1.9772, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.49905719673161536, |
|
"grad_norm": 2.58915114402771, |
|
"learning_rate": 0.00016982266446314537, |
|
"loss": 1.7888, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.4996857322438718, |
|
"grad_norm": 2.1942155361175537, |
|
"learning_rate": 0.00016962879487877989, |
|
"loss": 1.7295, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.5003142677561282, |
|
"grad_norm": 5.866901874542236, |
|
"learning_rate": 0.0001694344308630913, |
|
"loss": 1.6227, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.5009428032683847, |
|
"grad_norm": 4.190849304199219, |
|
"learning_rate": 0.00016923957402771128, |
|
"loss": 1.9444, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.5015713387806411, |
|
"grad_norm": 3.735076665878296, |
|
"learning_rate": 0.000169044225988358, |
|
"loss": 1.5726, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.5021998742928976, |
|
"grad_norm": 2.586787223815918, |
|
"learning_rate": 0.00016884838836482243, |
|
"loss": 1.7038, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.502828409805154, |
|
"grad_norm": 3.8179101943969727, |
|
"learning_rate": 0.0001686520627809553, |
|
"loss": 1.8377, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.502828409805154, |
|
"eval_loss": 1.0471904277801514, |
|
"eval_runtime": 1566.6812, |
|
"eval_samples_per_second": 1.646, |
|
"eval_steps_per_second": 1.646, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5034569453174105, |
|
"grad_norm": 3.700340986251831, |
|
"learning_rate": 0.00016845525086465327, |
|
"loss": 1.3971, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.5040854808296669, |
|
"grad_norm": 3.863966703414917, |
|
"learning_rate": 0.00016825795424784554, |
|
"loss": 1.3689, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.5047140163419234, |
|
"grad_norm": 5.150142192840576, |
|
"learning_rate": 0.00016806017456648056, |
|
"loss": 1.5778, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.5053425518541798, |
|
"grad_norm": 3.1379594802856445, |
|
"learning_rate": 0.0001678619134605121, |
|
"loss": 1.6475, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.5059710873664363, |
|
"grad_norm": 2.5254056453704834, |
|
"learning_rate": 0.00016766317257388576, |
|
"loss": 1.9058, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.5065996228786926, |
|
"grad_norm": 3.2213141918182373, |
|
"learning_rate": 0.00016746395355452564, |
|
"loss": 1.5966, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.507228158390949, |
|
"grad_norm": 2.67704701423645, |
|
"learning_rate": 0.0001672642580543202, |
|
"loss": 1.8397, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.5078566939032055, |
|
"grad_norm": 2.7627856731414795, |
|
"learning_rate": 0.00016706408772910894, |
|
"loss": 1.9829, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.5084852294154619, |
|
"grad_norm": 3.3486082553863525, |
|
"learning_rate": 0.00016686344423866843, |
|
"loss": 2.0275, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.5091137649277184, |
|
"grad_norm": 4.416764736175537, |
|
"learning_rate": 0.00016666232924669866, |
|
"loss": 1.7995, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.5097423004399748, |
|
"grad_norm": 3.926478862762451, |
|
"learning_rate": 0.00016646074442080926, |
|
"loss": 1.3349, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.5103708359522313, |
|
"grad_norm": 3.0945920944213867, |
|
"learning_rate": 0.00016625869143250564, |
|
"loss": 1.8098, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.5109993714644877, |
|
"grad_norm": 7.1972784996032715, |
|
"learning_rate": 0.0001660561719571751, |
|
"loss": 1.7203, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.5116279069767442, |
|
"grad_norm": 2.261600971221924, |
|
"learning_rate": 0.00016585318767407296, |
|
"loss": 1.8334, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.5122564424890006, |
|
"grad_norm": 23.770172119140625, |
|
"learning_rate": 0.00016564974026630873, |
|
"loss": 1.2436, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.5128849780012571, |
|
"grad_norm": 1.8770813941955566, |
|
"learning_rate": 0.0001654458314208319, |
|
"loss": 1.9914, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.5135135135135135, |
|
"grad_norm": 2.9977574348449707, |
|
"learning_rate": 0.00016524146282841825, |
|
"loss": 1.5698, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.51414204902577, |
|
"grad_norm": 4.5086565017700195, |
|
"learning_rate": 0.00016503663618365572, |
|
"loss": 1.8745, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.5147705845380264, |
|
"grad_norm": 2.250277280807495, |
|
"learning_rate": 0.00016483135318493024, |
|
"loss": 2.0471, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.5153991200502829, |
|
"grad_norm": 2.7155048847198486, |
|
"learning_rate": 0.0001646256155344118, |
|
"loss": 1.7132, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.5160276555625393, |
|
"grad_norm": 2.070505142211914, |
|
"learning_rate": 0.00016441942493804028, |
|
"loss": 1.5661, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.5166561910747958, |
|
"grad_norm": 2.1399850845336914, |
|
"learning_rate": 0.0001642127831055113, |
|
"loss": 2.2479, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.5172847265870522, |
|
"grad_norm": 10.017570495605469, |
|
"learning_rate": 0.00016400569175026202, |
|
"loss": 1.8744, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.5179132620993087, |
|
"grad_norm": 7.541717529296875, |
|
"learning_rate": 0.00016379815258945704, |
|
"loss": 1.787, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.518541797611565, |
|
"grad_norm": 5.188529968261719, |
|
"learning_rate": 0.00016359016734397396, |
|
"loss": 1.8559, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.5191703331238215, |
|
"grad_norm": 2.2011945247650146, |
|
"learning_rate": 0.00016338173773838938, |
|
"loss": 1.5309, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.5197988686360779, |
|
"grad_norm": 3.5199337005615234, |
|
"learning_rate": 0.00016317286550096436, |
|
"loss": 1.4987, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.5204274041483343, |
|
"grad_norm": 3.5257673263549805, |
|
"learning_rate": 0.00016296355236363022, |
|
"loss": 1.4651, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.5210559396605908, |
|
"grad_norm": 7.473969459533691, |
|
"learning_rate": 0.00016275380006197413, |
|
"loss": 1.3646, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.5216844751728472, |
|
"grad_norm": 3.5532689094543457, |
|
"learning_rate": 0.0001625436103352247, |
|
"loss": 1.6972, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.5223130106851037, |
|
"grad_norm": 3.332782745361328, |
|
"learning_rate": 0.00016233298492623766, |
|
"loss": 1.8272, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.5229415461973601, |
|
"grad_norm": 2.6560447216033936, |
|
"learning_rate": 0.00016212192558148127, |
|
"loss": 1.849, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.5235700817096166, |
|
"grad_norm": 2.767364978790283, |
|
"learning_rate": 0.00016191043405102199, |
|
"loss": 1.6504, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.524198617221873, |
|
"grad_norm": 2.5089499950408936, |
|
"learning_rate": 0.0001616985120885098, |
|
"loss": 1.5984, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.5248271527341295, |
|
"grad_norm": 2.243863582611084, |
|
"learning_rate": 0.0001614861614511638, |
|
"loss": 1.9573, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.5254556882463859, |
|
"grad_norm": 5.696925640106201, |
|
"learning_rate": 0.00016127338389975758, |
|
"loss": 1.5365, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.5260842237586424, |
|
"grad_norm": 15.416232109069824, |
|
"learning_rate": 0.00016106018119860455, |
|
"loss": 1.5344, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.5267127592708988, |
|
"grad_norm": 3.5068626403808594, |
|
"learning_rate": 0.00016084655511554354, |
|
"loss": 1.3403, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.5273412947831553, |
|
"grad_norm": 2.7104172706604004, |
|
"learning_rate": 0.00016063250742192386, |
|
"loss": 1.6286, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.5279698302954117, |
|
"grad_norm": 2.5087764263153076, |
|
"learning_rate": 0.00016041803989259076, |
|
"loss": 1.7183, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.5285983658076682, |
|
"grad_norm": 8.253801345825195, |
|
"learning_rate": 0.00016020315430587072, |
|
"loss": 1.944, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.5292269013199246, |
|
"grad_norm": 6.215940952301025, |
|
"learning_rate": 0.00015998785244355669, |
|
"loss": 1.2669, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.5298554368321811, |
|
"grad_norm": 2.5656116008758545, |
|
"learning_rate": 0.00015977213609089322, |
|
"loss": 2.0271, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.5304839723444374, |
|
"grad_norm": 5.3323493003845215, |
|
"learning_rate": 0.00015955600703656186, |
|
"loss": 1.4939, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.531112507856694, |
|
"grad_norm": 2.5133509635925293, |
|
"learning_rate": 0.00015933946707266615, |
|
"loss": 2.0631, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.5317410433689503, |
|
"grad_norm": 2.797114610671997, |
|
"learning_rate": 0.0001591225179947168, |
|
"loss": 1.8101, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.5323695788812068, |
|
"grad_norm": 3.838271141052246, |
|
"learning_rate": 0.00015890516160161678, |
|
"loss": 1.4939, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.5329981143934632, |
|
"grad_norm": 6.3040995597839355, |
|
"learning_rate": 0.0001586873996956466, |
|
"loss": 1.7562, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.5336266499057196, |
|
"grad_norm": 3.633244514465332, |
|
"learning_rate": 0.0001584692340824491, |
|
"loss": 1.9957, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.5342551854179761, |
|
"grad_norm": 3.348219156265259, |
|
"learning_rate": 0.00015825066657101453, |
|
"loss": 1.3537, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5348837209302325, |
|
"grad_norm": 2.8102500438690186, |
|
"learning_rate": 0.00015803169897366572, |
|
"loss": 2.0199, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.535512256442489, |
|
"grad_norm": 2.8683907985687256, |
|
"learning_rate": 0.00015781233310604287, |
|
"loss": 1.8299, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.5361407919547454, |
|
"grad_norm": 2.856029510498047, |
|
"learning_rate": 0.00015759257078708867, |
|
"loss": 1.5422, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.5367693274670019, |
|
"grad_norm": 3.02531099319458, |
|
"learning_rate": 0.000157372413839033, |
|
"loss": 1.7447, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.5373978629792583, |
|
"grad_norm": 2.963717460632324, |
|
"learning_rate": 0.000157151864087378, |
|
"loss": 1.6809, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.5380263984915148, |
|
"grad_norm": 16.036455154418945, |
|
"learning_rate": 0.00015693092336088274, |
|
"loss": 1.5663, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.5386549340037712, |
|
"grad_norm": 14.2581148147583, |
|
"learning_rate": 0.00015670959349154838, |
|
"loss": 1.6203, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.5392834695160277, |
|
"grad_norm": 2.873945474624634, |
|
"learning_rate": 0.00015648787631460272, |
|
"loss": 1.6255, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.5399120050282841, |
|
"grad_norm": 2.8614494800567627, |
|
"learning_rate": 0.0001562657736684849, |
|
"loss": 1.7169, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.5405405405405406, |
|
"grad_norm": 2.071613311767578, |
|
"learning_rate": 0.0001560432873948305, |
|
"loss": 1.8202, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.541169076052797, |
|
"grad_norm": 3.9846320152282715, |
|
"learning_rate": 0.00015582041933845598, |
|
"loss": 1.4323, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.5417976115650535, |
|
"grad_norm": 2.527543544769287, |
|
"learning_rate": 0.00015559717134734343, |
|
"loss": 1.5789, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.5424261470773099, |
|
"grad_norm": 2.104928493499756, |
|
"learning_rate": 0.00015537354527262538, |
|
"loss": 1.8236, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.5430546825895664, |
|
"grad_norm": 2.0675976276397705, |
|
"learning_rate": 0.00015514954296856932, |
|
"loss": 1.7982, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.5436832181018227, |
|
"grad_norm": 4.772681713104248, |
|
"learning_rate": 0.0001549251662925624, |
|
"loss": 1.816, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.5443117536140792, |
|
"grad_norm": 3.118591785430908, |
|
"learning_rate": 0.00015470041710509587, |
|
"loss": 1.8371, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.5449402891263356, |
|
"grad_norm": 3.6090643405914307, |
|
"learning_rate": 0.00015447529726975, |
|
"loss": 1.6502, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.5455688246385921, |
|
"grad_norm": 4.579808235168457, |
|
"learning_rate": 0.00015424980865317816, |
|
"loss": 1.8025, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.5461973601508485, |
|
"grad_norm": 2.9439306259155273, |
|
"learning_rate": 0.00015402395312509178, |
|
"loss": 1.8314, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.5468258956631049, |
|
"grad_norm": 3.228983163833618, |
|
"learning_rate": 0.00015379773255824454, |
|
"loss": 1.6519, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.5474544311753614, |
|
"grad_norm": 3.073241949081421, |
|
"learning_rate": 0.0001535711488284171, |
|
"loss": 1.4897, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.5480829666876178, |
|
"grad_norm": 3.359070062637329, |
|
"learning_rate": 0.00015334420381440115, |
|
"loss": 1.9755, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.5487115021998743, |
|
"grad_norm": 2.6963229179382324, |
|
"learning_rate": 0.00015311689939798435, |
|
"loss": 1.5332, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.5493400377121307, |
|
"grad_norm": 4.286027431488037, |
|
"learning_rate": 0.0001528892374639343, |
|
"loss": 1.5066, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.5499685732243872, |
|
"grad_norm": 3.306358814239502, |
|
"learning_rate": 0.00015266121989998313, |
|
"loss": 1.2646, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.5505971087366436, |
|
"grad_norm": 4.356008529663086, |
|
"learning_rate": 0.00015243284859681178, |
|
"loss": 1.4592, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.5512256442489001, |
|
"grad_norm": 2.7557637691497803, |
|
"learning_rate": 0.0001522041254480343, |
|
"loss": 2.0077, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.5518541797611565, |
|
"grad_norm": 5.654608249664307, |
|
"learning_rate": 0.0001519750523501823, |
|
"loss": 1.5102, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.552482715273413, |
|
"grad_norm": 3.0355632305145264, |
|
"learning_rate": 0.00015174563120268886, |
|
"loss": 1.7046, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.5531112507856694, |
|
"grad_norm": 2.3773937225341797, |
|
"learning_rate": 0.00015151586390787337, |
|
"loss": 2.0118, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.5537397862979259, |
|
"grad_norm": 1.9712527990341187, |
|
"learning_rate": 0.00015128575237092505, |
|
"loss": 1.6286, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.5543683218101823, |
|
"grad_norm": 2.4755544662475586, |
|
"learning_rate": 0.0001510552984998878, |
|
"loss": 1.7739, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.5549968573224388, |
|
"grad_norm": 1.8995095491409302, |
|
"learning_rate": 0.00015082450420564393, |
|
"loss": 1.9459, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.5556253928346951, |
|
"grad_norm": 1.985937237739563, |
|
"learning_rate": 0.0001505933714018986, |
|
"loss": 1.579, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.5562539283469516, |
|
"grad_norm": 2.6388299465179443, |
|
"learning_rate": 0.00015036190200516366, |
|
"loss": 1.6257, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.556882463859208, |
|
"grad_norm": 1.895621418952942, |
|
"learning_rate": 0.0001501300979347421, |
|
"loss": 1.8962, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.5575109993714645, |
|
"grad_norm": 54.625526428222656, |
|
"learning_rate": 0.00014989796111271187, |
|
"loss": 1.7338, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.5581395348837209, |
|
"grad_norm": 2.2018158435821533, |
|
"learning_rate": 0.00014966549346391009, |
|
"loss": 1.8835, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.5587680703959774, |
|
"grad_norm": 5.001894474029541, |
|
"learning_rate": 0.00014943269691591692, |
|
"loss": 1.7645, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.5593966059082338, |
|
"grad_norm": 24.939252853393555, |
|
"learning_rate": 0.0001491995733990399, |
|
"loss": 1.7232, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5600251414204902, |
|
"grad_norm": 4.670384883880615, |
|
"learning_rate": 0.00014896612484629755, |
|
"loss": 1.5004, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.5606536769327467, |
|
"grad_norm": 2.6586263179779053, |
|
"learning_rate": 0.00014873235319340359, |
|
"loss": 1.6975, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.5612822124450031, |
|
"grad_norm": 2.209890604019165, |
|
"learning_rate": 0.00014849826037875084, |
|
"loss": 1.5577, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.5619107479572596, |
|
"grad_norm": 3.994396686553955, |
|
"learning_rate": 0.00014826384834339522, |
|
"loss": 1.2563, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.562539283469516, |
|
"grad_norm": 2.467743158340454, |
|
"learning_rate": 0.00014802911903103935, |
|
"loss": 1.8903, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.5631678189817725, |
|
"grad_norm": 2.7943837642669678, |
|
"learning_rate": 0.00014779407438801685, |
|
"loss": 1.8327, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.5637963544940289, |
|
"grad_norm": 3.3911454677581787, |
|
"learning_rate": 0.00014755871636327594, |
|
"loss": 1.4657, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.5644248900062854, |
|
"grad_norm": 2.7245395183563232, |
|
"learning_rate": 0.00014732304690836336, |
|
"loss": 1.6989, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.5650534255185418, |
|
"grad_norm": 2.283728837966919, |
|
"learning_rate": 0.00014708706797740814, |
|
"loss": 1.6395, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.5656819610307983, |
|
"grad_norm": 6.15742826461792, |
|
"learning_rate": 0.0001468507815271055, |
|
"loss": 1.5643, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.5663104965430547, |
|
"grad_norm": 3.2166006565093994, |
|
"learning_rate": 0.00014661418951670042, |
|
"loss": 2.0207, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.5669390320553112, |
|
"grad_norm": 2.7222771644592285, |
|
"learning_rate": 0.00014637729390797172, |
|
"loss": 1.8429, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.5675675675675675, |
|
"grad_norm": 2.573390483856201, |
|
"learning_rate": 0.00014614009666521542, |
|
"loss": 1.5806, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.568196103079824, |
|
"grad_norm": 2.363739490509033, |
|
"learning_rate": 0.00014590259975522877, |
|
"loss": 1.6681, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.5688246385920804, |
|
"grad_norm": 2.040877342224121, |
|
"learning_rate": 0.0001456648051472937, |
|
"loss": 1.9898, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.5694531741043369, |
|
"grad_norm": 2.5492324829101562, |
|
"learning_rate": 0.00014542671481316072, |
|
"loss": 1.111, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.5700817096165933, |
|
"grad_norm": 9.515697479248047, |
|
"learning_rate": 0.00014518833072703227, |
|
"loss": 1.8542, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.5707102451288498, |
|
"grad_norm": 2.2363474369049072, |
|
"learning_rate": 0.00014494965486554678, |
|
"loss": 1.7958, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.5713387806411062, |
|
"grad_norm": 4.901029109954834, |
|
"learning_rate": 0.0001447106892077618, |
|
"loss": 1.9061, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.5719673161533627, |
|
"grad_norm": 7.2911882400512695, |
|
"learning_rate": 0.00014447143573513796, |
|
"loss": 1.3471, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.5725958516656191, |
|
"grad_norm": 2.8644142150878906, |
|
"learning_rate": 0.00014423189643152226, |
|
"loss": 1.6385, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.5732243871778756, |
|
"grad_norm": 2.798529624938965, |
|
"learning_rate": 0.00014399207328313194, |
|
"loss": 1.8201, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.573852922690132, |
|
"grad_norm": 2.2028634548187256, |
|
"learning_rate": 0.00014375196827853766, |
|
"loss": 1.6705, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.5744814582023884, |
|
"grad_norm": 2.8815603256225586, |
|
"learning_rate": 0.00014351158340864732, |
|
"loss": 1.7115, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.5751099937146449, |
|
"grad_norm": 3.91245436668396, |
|
"learning_rate": 0.00014327092066668932, |
|
"loss": 1.8134, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.5757385292269013, |
|
"grad_norm": 2.8592488765716553, |
|
"learning_rate": 0.00014302998204819618, |
|
"loss": 1.5497, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.5763670647391578, |
|
"grad_norm": 2.7535643577575684, |
|
"learning_rate": 0.0001427887695509879, |
|
"loss": 1.6112, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.5769956002514142, |
|
"grad_norm": 3.177537679672241, |
|
"learning_rate": 0.00014254728517515547, |
|
"loss": 1.7777, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.5776241357636707, |
|
"grad_norm": 2.271500825881958, |
|
"learning_rate": 0.00014230553092304415, |
|
"loss": 1.9161, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.578252671275927, |
|
"grad_norm": 2.780168056488037, |
|
"learning_rate": 0.00014206350879923714, |
|
"loss": 1.602, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.5788812067881836, |
|
"grad_norm": 1.8702342510223389, |
|
"learning_rate": 0.00014182122081053865, |
|
"loss": 1.6333, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.57950974230044, |
|
"grad_norm": 2.2033650875091553, |
|
"learning_rate": 0.00014157866896595745, |
|
"loss": 1.7852, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.5801382778126964, |
|
"grad_norm": 2.3561549186706543, |
|
"learning_rate": 0.00014133585527669007, |
|
"loss": 1.5509, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.5807668133249528, |
|
"grad_norm": 2.623044967651367, |
|
"learning_rate": 0.00014109278175610435, |
|
"loss": 1.9336, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.5813953488372093, |
|
"grad_norm": 2.6602065563201904, |
|
"learning_rate": 0.00014084945041972256, |
|
"loss": 2.0173, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.5820238843494657, |
|
"grad_norm": 7.116496562957764, |
|
"learning_rate": 0.0001406058632852046, |
|
"loss": 1.8753, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.5826524198617222, |
|
"grad_norm": 6.817981243133545, |
|
"learning_rate": 0.0001403620223723316, |
|
"loss": 1.6024, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.5832809553739786, |
|
"grad_norm": 1.9530831575393677, |
|
"learning_rate": 0.00014011792970298887, |
|
"loss": 1.5929, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.5839094908862351, |
|
"grad_norm": 3.191023826599121, |
|
"learning_rate": 0.0001398735873011492, |
|
"loss": 1.6694, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.5845380263984915, |
|
"grad_norm": 2.2665297985076904, |
|
"learning_rate": 0.0001396289971928562, |
|
"loss": 1.5795, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.585166561910748, |
|
"grad_norm": 3.497770071029663, |
|
"learning_rate": 0.00013938416140620738, |
|
"loss": 1.8285, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.5857950974230044, |
|
"grad_norm": 4.176694869995117, |
|
"learning_rate": 0.00013913908197133742, |
|
"loss": 1.2752, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.5864236329352609, |
|
"grad_norm": 2.205921173095703, |
|
"learning_rate": 0.00013889376092040122, |
|
"loss": 1.7818, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.5870521684475173, |
|
"grad_norm": 2.4708597660064697, |
|
"learning_rate": 0.0001386482002875571, |
|
"loss": 1.7249, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.5876807039597737, |
|
"grad_norm": 4.025508880615234, |
|
"learning_rate": 0.00013840240210895007, |
|
"loss": 1.2812, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.5883092394720302, |
|
"grad_norm": 2.7530086040496826, |
|
"learning_rate": 0.00013815636842269468, |
|
"loss": 1.746, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.5889377749842866, |
|
"grad_norm": 1.9235401153564453, |
|
"learning_rate": 0.00013791010126885838, |
|
"loss": 2.1382, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.5895663104965431, |
|
"grad_norm": 2.125422239303589, |
|
"learning_rate": 0.00013766360268944444, |
|
"loss": 1.6725, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.5901948460087995, |
|
"grad_norm": 2.940282106399536, |
|
"learning_rate": 0.0001374168747283751, |
|
"loss": 1.4106, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.590823381521056, |
|
"grad_norm": 3.4331438541412354, |
|
"learning_rate": 0.00013716991943147453, |
|
"loss": 1.2228, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.5914519170333123, |
|
"grad_norm": 2.6568222045898438, |
|
"learning_rate": 0.000136922738846452, |
|
"loss": 1.8888, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.5920804525455688, |
|
"grad_norm": 2.1608529090881348, |
|
"learning_rate": 0.00013667533502288476, |
|
"loss": 1.7013, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.5927089880578252, |
|
"grad_norm": 2.2908928394317627, |
|
"learning_rate": 0.00013642771001220121, |
|
"loss": 1.6208, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.5933375235700817, |
|
"grad_norm": 3.6159627437591553, |
|
"learning_rate": 0.00013617986586766373, |
|
"loss": 1.4259, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.5939660590823381, |
|
"grad_norm": 2.032099485397339, |
|
"learning_rate": 0.00013593180464435166, |
|
"loss": 1.8553, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.5945945945945946, |
|
"grad_norm": 7.025946140289307, |
|
"learning_rate": 0.00013568352839914445, |
|
"loss": 1.6668, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.595223130106851, |
|
"grad_norm": 2.595669746398926, |
|
"learning_rate": 0.00013543503919070439, |
|
"loss": 1.7053, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.5958516656191075, |
|
"grad_norm": 2.9132773876190186, |
|
"learning_rate": 0.0001351863390794596, |
|
"loss": 1.6292, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.5964802011313639, |
|
"grad_norm": 2.3960158824920654, |
|
"learning_rate": 0.00013493743012758707, |
|
"loss": 1.557, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.5971087366436204, |
|
"grad_norm": 1.575936198234558, |
|
"learning_rate": 0.00013468831439899533, |
|
"loss": 1.6991, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.5977372721558768, |
|
"grad_norm": 2.822359800338745, |
|
"learning_rate": 0.00013443899395930762, |
|
"loss": 1.391, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.5983658076681333, |
|
"grad_norm": 5.096828937530518, |
|
"learning_rate": 0.00013418947087584445, |
|
"loss": 1.9117, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.5989943431803897, |
|
"grad_norm": 4.0271315574646, |
|
"learning_rate": 0.00013393974721760674, |
|
"loss": 1.3417, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.5996228786926462, |
|
"grad_norm": 2.699956178665161, |
|
"learning_rate": 0.0001336898250552584, |
|
"loss": 1.7204, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.6002514142049026, |
|
"grad_norm": 3.784418821334839, |
|
"learning_rate": 0.0001334397064611095, |
|
"loss": 1.3982, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.600879949717159, |
|
"grad_norm": 2.163050413131714, |
|
"learning_rate": 0.00013318939350909868, |
|
"loss": 1.6849, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.6015084852294155, |
|
"grad_norm": 2.501710891723633, |
|
"learning_rate": 0.00013293888827477633, |
|
"loss": 1.6264, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.6021370207416719, |
|
"grad_norm": 2.0204994678497314, |
|
"learning_rate": 0.00013268819283528707, |
|
"loss": 1.6999, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.6027655562539284, |
|
"grad_norm": 2.1536333560943604, |
|
"learning_rate": 0.00013243730926935278, |
|
"loss": 1.6226, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.6033940917661847, |
|
"grad_norm": 2.409135103225708, |
|
"learning_rate": 0.00013218623965725518, |
|
"loss": 1.8002, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.6033940917661847, |
|
"eval_loss": 1.0100071430206299, |
|
"eval_runtime": 1572.7567, |
|
"eval_samples_per_second": 1.639, |
|
"eval_steps_per_second": 1.639, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.6040226272784412, |
|
"grad_norm": 2.5011773109436035, |
|
"learning_rate": 0.00013193498608081873, |
|
"loss": 1.6751, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.6046511627906976, |
|
"grad_norm": 3.5764825344085693, |
|
"learning_rate": 0.00013168355062339316, |
|
"loss": 1.8907, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.6052796983029541, |
|
"grad_norm": 8.140620231628418, |
|
"learning_rate": 0.00013143193536983646, |
|
"loss": 1.6468, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.6059082338152105, |
|
"grad_norm": 2.4835751056671143, |
|
"learning_rate": 0.00013118014240649732, |
|
"loss": 2.0628, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.606536769327467, |
|
"grad_norm": 2.2530457973480225, |
|
"learning_rate": 0.00013092817382119815, |
|
"loss": 1.7598, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.6071653048397234, |
|
"grad_norm": 4.617806911468506, |
|
"learning_rate": 0.00013067603170321742, |
|
"loss": 1.8704, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.6077938403519799, |
|
"grad_norm": 2.2900989055633545, |
|
"learning_rate": 0.00013042371814327256, |
|
"loss": 2.0224, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.6084223758642363, |
|
"grad_norm": 2.316289186477661, |
|
"learning_rate": 0.00013017123523350257, |
|
"loss": 1.5582, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.6090509113764928, |
|
"grad_norm": 2.241147994995117, |
|
"learning_rate": 0.0001299185850674507, |
|
"loss": 2.1075, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.6096794468887492, |
|
"grad_norm": 2.1747395992279053, |
|
"learning_rate": 0.00012966576974004704, |
|
"loss": 1.8833, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.6103079824010057, |
|
"grad_norm": 2.43947696685791, |
|
"learning_rate": 0.00012941279134759113, |
|
"loss": 1.638, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.6109365179132621, |
|
"grad_norm": 2.17942214012146, |
|
"learning_rate": 0.00012915965198773458, |
|
"loss": 1.9326, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.6115650534255186, |
|
"grad_norm": 2.369276762008667, |
|
"learning_rate": 0.0001289063537594639, |
|
"loss": 1.8046, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.612193588937775, |
|
"grad_norm": 2.682879686355591, |
|
"learning_rate": 0.00012865289876308266, |
|
"loss": 1.6225, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.6128221244500315, |
|
"grad_norm": 3.7697911262512207, |
|
"learning_rate": 0.00012839928910019454, |
|
"loss": 1.2141, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.6134506599622879, |
|
"grad_norm": 2.3096630573272705, |
|
"learning_rate": 0.00012814552687368552, |
|
"loss": 1.5018, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.6140791954745443, |
|
"grad_norm": 2.0321295261383057, |
|
"learning_rate": 0.00012789161418770676, |
|
"loss": 1.663, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.6147077309868008, |
|
"grad_norm": 10.517263412475586, |
|
"learning_rate": 0.0001276375531476569, |
|
"loss": 1.7105, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.6153362664990571, |
|
"grad_norm": 2.158304452896118, |
|
"learning_rate": 0.00012738334586016468, |
|
"loss": 1.651, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.6159648020113137, |
|
"grad_norm": 1.8691167831420898, |
|
"learning_rate": 0.00012712899443307165, |
|
"loss": 1.8252, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.61659333752357, |
|
"grad_norm": 7.265283107757568, |
|
"learning_rate": 0.00012687450097541436, |
|
"loss": 1.6606, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.6172218730358265, |
|
"grad_norm": 2.1034469604492188, |
|
"learning_rate": 0.00012661986759740715, |
|
"loss": 1.7667, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.6178504085480829, |
|
"grad_norm": 2.1536924839019775, |
|
"learning_rate": 0.00012636509641042457, |
|
"loss": 1.7351, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.6184789440603394, |
|
"grad_norm": 2.4590165615081787, |
|
"learning_rate": 0.00012611018952698376, |
|
"loss": 1.6598, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.6191074795725958, |
|
"grad_norm": 7.8274712562561035, |
|
"learning_rate": 0.00012585514906072713, |
|
"loss": 1.6875, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.6197360150848523, |
|
"grad_norm": 2.896134853363037, |
|
"learning_rate": 0.00012559997712640475, |
|
"loss": 1.5077, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.6203645505971087, |
|
"grad_norm": 10.719966888427734, |
|
"learning_rate": 0.0001253446758398567, |
|
"loss": 1.5387, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.6209930861093652, |
|
"grad_norm": 2.1745476722717285, |
|
"learning_rate": 0.00012508924731799567, |
|
"loss": 1.8332, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.6216216216216216, |
|
"grad_norm": 2.108567714691162, |
|
"learning_rate": 0.00012483369367878938, |
|
"loss": 1.8921, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.6222501571338781, |
|
"grad_norm": 2.3844761848449707, |
|
"learning_rate": 0.000124578017041243, |
|
"loss": 1.7234, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.6228786926461345, |
|
"grad_norm": 2.899996757507324, |
|
"learning_rate": 0.00012432221952538155, |
|
"loss": 1.6937, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.623507228158391, |
|
"grad_norm": 2.4060072898864746, |
|
"learning_rate": 0.00012406630325223237, |
|
"loss": 1.5577, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.6241357636706474, |
|
"grad_norm": 2.430901527404785, |
|
"learning_rate": 0.00012381027034380753, |
|
"loss": 1.9603, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.6247642991829039, |
|
"grad_norm": 2.878659725189209, |
|
"learning_rate": 0.00012355412292308618, |
|
"loss": 1.5541, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.6253928346951603, |
|
"grad_norm": 2.1778147220611572, |
|
"learning_rate": 0.00012329786311399702, |
|
"loss": 1.6953, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.6260213702074168, |
|
"grad_norm": 2.2221829891204834, |
|
"learning_rate": 0.00012304149304140068, |
|
"loss": 1.5897, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.6266499057196732, |
|
"grad_norm": 6.015483379364014, |
|
"learning_rate": 0.000122785014831072, |
|
"loss": 1.8711, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.6272784412319296, |
|
"grad_norm": 2.742525815963745, |
|
"learning_rate": 0.00012252843060968254, |
|
"loss": 1.457, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.627906976744186, |
|
"grad_norm": 2.0009000301361084, |
|
"learning_rate": 0.0001222717425047828, |
|
"loss": 1.8535, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.6285355122564424, |
|
"grad_norm": 2.5420360565185547, |
|
"learning_rate": 0.00012201495264478482, |
|
"loss": 1.7449, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.6291640477686989, |
|
"grad_norm": 2.2101120948791504, |
|
"learning_rate": 0.0001217580631589442, |
|
"loss": 1.7598, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.6297925832809553, |
|
"grad_norm": 11.867177963256836, |
|
"learning_rate": 0.00012150107617734277, |
|
"loss": 1.6518, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.6304211187932118, |
|
"grad_norm": 2.011889934539795, |
|
"learning_rate": 0.00012124399383087062, |
|
"loss": 1.7085, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.6310496543054682, |
|
"grad_norm": 1.8404422998428345, |
|
"learning_rate": 0.00012098681825120878, |
|
"loss": 1.985, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.6316781898177247, |
|
"grad_norm": 5.306151390075684, |
|
"learning_rate": 0.00012072955157081119, |
|
"loss": 1.4185, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.6323067253299811, |
|
"grad_norm": 2.5981574058532715, |
|
"learning_rate": 0.00012047219592288723, |
|
"loss": 1.7356, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.6329352608422376, |
|
"grad_norm": 2.5699188709259033, |
|
"learning_rate": 0.00012021475344138401, |
|
"loss": 1.6779, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.633563796354494, |
|
"grad_norm": 3.318235158920288, |
|
"learning_rate": 0.00011995722626096868, |
|
"loss": 1.5733, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.6341923318667505, |
|
"grad_norm": 3.4934089183807373, |
|
"learning_rate": 0.00011969961651701056, |
|
"loss": 1.9764, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.6348208673790069, |
|
"grad_norm": 4.56378698348999, |
|
"learning_rate": 0.00011944192634556373, |
|
"loss": 1.5879, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.6354494028912634, |
|
"grad_norm": 3.962162494659424, |
|
"learning_rate": 0.00011918415788334904, |
|
"loss": 1.3625, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.6360779384035198, |
|
"grad_norm": 2.2097489833831787, |
|
"learning_rate": 0.00011892631326773663, |
|
"loss": 1.9658, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.6367064739157763, |
|
"grad_norm": 2.3107056617736816, |
|
"learning_rate": 0.00011866839463672797, |
|
"loss": 1.4506, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.6373350094280327, |
|
"grad_norm": 2.272876501083374, |
|
"learning_rate": 0.00011841040412893833, |
|
"loss": 1.3666, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.6379635449402892, |
|
"grad_norm": 4.438763618469238, |
|
"learning_rate": 0.00011815234388357894, |
|
"loss": 1.8106, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.6385920804525456, |
|
"grad_norm": 4.535871982574463, |
|
"learning_rate": 0.00011789421604043936, |
|
"loss": 1.4976, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.6392206159648021, |
|
"grad_norm": 4.039824485778809, |
|
"learning_rate": 0.00011763602273986947, |
|
"loss": 1.6569, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.6398491514770585, |
|
"grad_norm": 2.080919027328491, |
|
"learning_rate": 0.0001173777661227621, |
|
"loss": 1.8796, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.6404776869893148, |
|
"grad_norm": 2.2640366554260254, |
|
"learning_rate": 0.00011711944833053501, |
|
"loss": 1.6282, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.6411062225015713, |
|
"grad_norm": 3.223726272583008, |
|
"learning_rate": 0.00011686107150511318, |
|
"loss": 1.6309, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.6417347580138277, |
|
"grad_norm": 2.8494515419006348, |
|
"learning_rate": 0.00011660263778891122, |
|
"loss": 1.841, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.6423632935260842, |
|
"grad_norm": 2.749600648880005, |
|
"learning_rate": 0.00011634414932481527, |
|
"loss": 1.6571, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.6429918290383406, |
|
"grad_norm": 2.505894899368286, |
|
"learning_rate": 0.0001160856082561656, |
|
"loss": 1.0753, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.6436203645505971, |
|
"grad_norm": 3.126493453979492, |
|
"learning_rate": 0.00011582701672673859, |
|
"loss": 1.6436, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.6442489000628535, |
|
"grad_norm": 4.720798492431641, |
|
"learning_rate": 0.00011556837688072903, |
|
"loss": 1.6071, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.64487743557511, |
|
"grad_norm": 2.224419593811035, |
|
"learning_rate": 0.00011530969086273233, |
|
"loss": 1.6933, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.6455059710873664, |
|
"grad_norm": 49.69495391845703, |
|
"learning_rate": 0.00011505096081772682, |
|
"loss": 1.5789, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.6461345065996229, |
|
"grad_norm": 3.9098896980285645, |
|
"learning_rate": 0.00011479218889105585, |
|
"loss": 1.7641, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.6467630421118793, |
|
"grad_norm": 2.3250463008880615, |
|
"learning_rate": 0.00011453337722840998, |
|
"loss": 1.5639, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.6473915776241358, |
|
"grad_norm": 5.340567111968994, |
|
"learning_rate": 0.00011427452797580935, |
|
"loss": 1.5509, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.6480201131363922, |
|
"grad_norm": 2.429529905319214, |
|
"learning_rate": 0.00011401564327958582, |
|
"loss": 1.8108, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.6486486486486487, |
|
"grad_norm": 3.1510725021362305, |
|
"learning_rate": 0.00011375672528636501, |
|
"loss": 1.6593, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.6492771841609051, |
|
"grad_norm": 2.572134494781494, |
|
"learning_rate": 0.0001134977761430487, |
|
"loss": 1.9824, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.6499057196731616, |
|
"grad_norm": 3.040400981903076, |
|
"learning_rate": 0.000113238797996797, |
|
"loss": 1.2756, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.650534255185418, |
|
"grad_norm": 3.099134922027588, |
|
"learning_rate": 0.00011297979299501045, |
|
"loss": 1.6269, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.6511627906976745, |
|
"grad_norm": 3.738058567047119, |
|
"learning_rate": 0.00011272076328531227, |
|
"loss": 1.4476, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.6517913262099309, |
|
"grad_norm": 2.7769501209259033, |
|
"learning_rate": 0.00011246171101553065, |
|
"loss": 1.4405, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.6524198617221874, |
|
"grad_norm": 2.682551145553589, |
|
"learning_rate": 0.00011220263833368063, |
|
"loss": 1.6413, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.6530483972344437, |
|
"grad_norm": 2.499070167541504, |
|
"learning_rate": 0.00011194354738794679, |
|
"loss": 1.5278, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.6536769327467001, |
|
"grad_norm": 2.7833590507507324, |
|
"learning_rate": 0.00011168444032666494, |
|
"loss": 1.6264, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.6543054682589566, |
|
"grad_norm": 2.357224464416504, |
|
"learning_rate": 0.00011142531929830458, |
|
"loss": 1.9489, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.654934003771213, |
|
"grad_norm": 2.214799165725708, |
|
"learning_rate": 0.000111166186451451, |
|
"loss": 1.4704, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.6555625392834695, |
|
"grad_norm": 2.328057050704956, |
|
"learning_rate": 0.00011090704393478757, |
|
"loss": 1.4429, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.6561910747957259, |
|
"grad_norm": 2.5801315307617188, |
|
"learning_rate": 0.00011064789389707771, |
|
"loss": 1.4603, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.6568196103079824, |
|
"grad_norm": 2.1976988315582275, |
|
"learning_rate": 0.00011038873848714733, |
|
"loss": 1.5088, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.6574481458202388, |
|
"grad_norm": 28.95213508605957, |
|
"learning_rate": 0.0001101295798538668, |
|
"loss": 1.7002, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.6580766813324953, |
|
"grad_norm": 4.742020130157471, |
|
"learning_rate": 0.00010987042014613322, |
|
"loss": 1.5954, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.6587052168447517, |
|
"grad_norm": 3.1142210960388184, |
|
"learning_rate": 0.0001096112615128527, |
|
"loss": 1.6351, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.6593337523570082, |
|
"grad_norm": 4.33069372177124, |
|
"learning_rate": 0.00010935210610292233, |
|
"loss": 1.6891, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.6599622878692646, |
|
"grad_norm": 4.653831958770752, |
|
"learning_rate": 0.00010909295606521248, |
|
"loss": 1.8824, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.6605908233815211, |
|
"grad_norm": 5.533544063568115, |
|
"learning_rate": 0.00010883381354854902, |
|
"loss": 1.6467, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.6612193588937775, |
|
"grad_norm": 8.641119956970215, |
|
"learning_rate": 0.00010857468070169543, |
|
"loss": 1.2358, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.661847894406034, |
|
"grad_norm": 6.202789306640625, |
|
"learning_rate": 0.00010831555967333508, |
|
"loss": 1.624, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.6624764299182904, |
|
"grad_norm": 2.7470102310180664, |
|
"learning_rate": 0.00010805645261205322, |
|
"loss": 1.4665, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.6631049654305469, |
|
"grad_norm": 2.023517608642578, |
|
"learning_rate": 0.0001077973616663194, |
|
"loss": 1.9091, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.6637335009428033, |
|
"grad_norm": 2.7429111003875732, |
|
"learning_rate": 0.00010753828898446938, |
|
"loss": 1.8923, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.6643620364550598, |
|
"grad_norm": 3.8772711753845215, |
|
"learning_rate": 0.00010727923671468776, |
|
"loss": 1.3129, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.6649905719673161, |
|
"grad_norm": 3.5261390209198, |
|
"learning_rate": 0.00010702020700498955, |
|
"loss": 1.6074, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.6656191074795726, |
|
"grad_norm": 5.746640205383301, |
|
"learning_rate": 0.00010676120200320305, |
|
"loss": 1.4006, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.666247642991829, |
|
"grad_norm": 2.8895864486694336, |
|
"learning_rate": 0.0001065022238569513, |
|
"loss": 1.6521, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.6668761785040854, |
|
"grad_norm": 3.021003484725952, |
|
"learning_rate": 0.000106243274713635, |
|
"loss": 1.7535, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.6675047140163419, |
|
"grad_norm": 2.5879180431365967, |
|
"learning_rate": 0.0001059843567204142, |
|
"loss": 1.6239, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.6681332495285983, |
|
"grad_norm": 1.9499133825302124, |
|
"learning_rate": 0.00010572547202419066, |
|
"loss": 1.8125, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.6687617850408548, |
|
"grad_norm": 2.0314812660217285, |
|
"learning_rate": 0.00010546662277159004, |
|
"loss": 2.0005, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.6693903205531112, |
|
"grad_norm": 3.00298810005188, |
|
"learning_rate": 0.0001052078111089442, |
|
"loss": 1.6808, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.6700188560653677, |
|
"grad_norm": 4.277320384979248, |
|
"learning_rate": 0.00010494903918227322, |
|
"loss": 1.8763, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.6706473915776241, |
|
"grad_norm": 2.7577366828918457, |
|
"learning_rate": 0.0001046903091372677, |
|
"loss": 1.8262, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.6712759270898806, |
|
"grad_norm": 3.0533790588378906, |
|
"learning_rate": 0.00010443162311927102, |
|
"loss": 1.6754, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.671904462602137, |
|
"grad_norm": 4.176212787628174, |
|
"learning_rate": 0.00010417298327326144, |
|
"loss": 1.7674, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.6725329981143935, |
|
"grad_norm": 2.346803903579712, |
|
"learning_rate": 0.00010391439174383441, |
|
"loss": 1.8885, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6731615336266499, |
|
"grad_norm": 1.954500436782837, |
|
"learning_rate": 0.00010365585067518475, |
|
"loss": 1.8633, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.6737900691389064, |
|
"grad_norm": 32.38813781738281, |
|
"learning_rate": 0.0001033973622110888, |
|
"loss": 1.3843, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.6744186046511628, |
|
"grad_norm": 2.352205753326416, |
|
"learning_rate": 0.00010313892849488684, |
|
"loss": 1.6986, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.6750471401634193, |
|
"grad_norm": 3.1915619373321533, |
|
"learning_rate": 0.00010288055166946504, |
|
"loss": 1.8131, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.6756756756756757, |
|
"grad_norm": 7.586877346038818, |
|
"learning_rate": 0.00010262223387723795, |
|
"loss": 1.5918, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.6763042111879322, |
|
"grad_norm": 4.168215751647949, |
|
"learning_rate": 0.00010236397726013059, |
|
"loss": 1.947, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.6769327467001885, |
|
"grad_norm": 2.3333709239959717, |
|
"learning_rate": 0.00010210578395956069, |
|
"loss": 1.5813, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.677561282212445, |
|
"grad_norm": 7.254258155822754, |
|
"learning_rate": 0.00010184765611642108, |
|
"loss": 1.4828, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.6781898177247014, |
|
"grad_norm": 2.7089152336120605, |
|
"learning_rate": 0.00010158959587106167, |
|
"loss": 1.3991, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.6788183532369579, |
|
"grad_norm": 3.343262195587158, |
|
"learning_rate": 0.00010133160536327207, |
|
"loss": 1.841, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.6794468887492143, |
|
"grad_norm": 2.9180426597595215, |
|
"learning_rate": 0.00010107368673226338, |
|
"loss": 1.0978, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.6800754242614707, |
|
"grad_norm": 2.683201551437378, |
|
"learning_rate": 0.000100815842116651, |
|
"loss": 1.4882, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.6807039597737272, |
|
"grad_norm": 32.6074333190918, |
|
"learning_rate": 0.0001005580736544363, |
|
"loss": 1.5227, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.6813324952859836, |
|
"grad_norm": 3.46911883354187, |
|
"learning_rate": 0.00010030038348298946, |
|
"loss": 1.4316, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.6819610307982401, |
|
"grad_norm": 3.2221133708953857, |
|
"learning_rate": 0.00010004277373903137, |
|
"loss": 1.6761, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.6825895663104965, |
|
"grad_norm": 3.534419059753418, |
|
"learning_rate": 9.9785246558616e-05, |
|
"loss": 1.6618, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.683218101822753, |
|
"grad_norm": 2.5096707344055176, |
|
"learning_rate": 9.952780407711279e-05, |
|
"loss": 1.7826, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.6838466373350094, |
|
"grad_norm": 2.896599531173706, |
|
"learning_rate": 9.927044842918885e-05, |
|
"loss": 1.3887, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.6844751728472659, |
|
"grad_norm": 2.555786609649658, |
|
"learning_rate": 9.901318174879124e-05, |
|
"loss": 1.5155, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.6851037083595223, |
|
"grad_norm": 1.980133056640625, |
|
"learning_rate": 9.875600616912939e-05, |
|
"loss": 1.6521, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.6857322438717788, |
|
"grad_norm": 3.2604193687438965, |
|
"learning_rate": 9.849892382265728e-05, |
|
"loss": 1.5007, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.6863607793840352, |
|
"grad_norm": 2.7567083835601807, |
|
"learning_rate": 9.824193684105583e-05, |
|
"loss": 1.3282, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.6869893148962917, |
|
"grad_norm": 2.187772750854492, |
|
"learning_rate": 9.798504735521523e-05, |
|
"loss": 2.0221, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.6876178504085481, |
|
"grad_norm": 3.056830406188965, |
|
"learning_rate": 9.772825749521722e-05, |
|
"loss": 1.3636, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.6882463859208046, |
|
"grad_norm": 2.3823325634002686, |
|
"learning_rate": 9.747156939031752e-05, |
|
"loss": 1.7571, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.688874921433061, |
|
"grad_norm": 2.4851205348968506, |
|
"learning_rate": 9.721498516892804e-05, |
|
"loss": 1.5427, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.6895034569453174, |
|
"grad_norm": 3.261899471282959, |
|
"learning_rate": 9.695850695859932e-05, |
|
"loss": 1.6398, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.6901319924575738, |
|
"grad_norm": 2.661893129348755, |
|
"learning_rate": 9.6702136886003e-05, |
|
"loss": 2.042, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.6907605279698303, |
|
"grad_norm": 2.5974175930023193, |
|
"learning_rate": 9.644587707691384e-05, |
|
"loss": 1.7915, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.6913890634820867, |
|
"grad_norm": 2.121887445449829, |
|
"learning_rate": 9.618972965619253e-05, |
|
"loss": 1.745, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.6920175989943432, |
|
"grad_norm": 2.182525873184204, |
|
"learning_rate": 9.593369674776764e-05, |
|
"loss": 1.6437, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.6926461345065996, |
|
"grad_norm": 2.517305374145508, |
|
"learning_rate": 9.56777804746185e-05, |
|
"loss": 1.8382, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.6932746700188561, |
|
"grad_norm": 5.358400821685791, |
|
"learning_rate": 9.542198295875704e-05, |
|
"loss": 1.5445, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.6939032055311125, |
|
"grad_norm": 2.0764999389648438, |
|
"learning_rate": 9.516630632121067e-05, |
|
"loss": 1.2121, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.6945317410433689, |
|
"grad_norm": 2.8192496299743652, |
|
"learning_rate": 9.491075268200435e-05, |
|
"loss": 1.5149, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.6951602765556254, |
|
"grad_norm": 2.423426389694214, |
|
"learning_rate": 9.465532416014332e-05, |
|
"loss": 1.733, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.6957888120678818, |
|
"grad_norm": 2.0930826663970947, |
|
"learning_rate": 9.440002287359526e-05, |
|
"loss": 1.9933, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.6964173475801383, |
|
"grad_norm": 2.2740511894226074, |
|
"learning_rate": 9.414485093927286e-05, |
|
"loss": 1.7502, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.6970458830923947, |
|
"grad_norm": 2.221980333328247, |
|
"learning_rate": 9.388981047301628e-05, |
|
"loss": 1.5733, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.6976744186046512, |
|
"grad_norm": 20.93427085876465, |
|
"learning_rate": 9.363490358957547e-05, |
|
"loss": 1.9126, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.6983029541169076, |
|
"grad_norm": 2.46482253074646, |
|
"learning_rate": 9.33801324025929e-05, |
|
"loss": 1.5659, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.6989314896291641, |
|
"grad_norm": 2.0427463054656982, |
|
"learning_rate": 9.312549902458567e-05, |
|
"loss": 1.7553, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.6995600251414205, |
|
"grad_norm": 2.2169735431671143, |
|
"learning_rate": 9.28710055669284e-05, |
|
"loss": 1.4851, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.700188560653677, |
|
"grad_norm": 2.239305257797241, |
|
"learning_rate": 9.261665413983534e-05, |
|
"loss": 1.7265, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.7008170961659334, |
|
"grad_norm": 2.2883667945861816, |
|
"learning_rate": 9.236244685234314e-05, |
|
"loss": 1.5215, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.7014456316781899, |
|
"grad_norm": 3.0989158153533936, |
|
"learning_rate": 9.210838581229327e-05, |
|
"loss": 1.2851, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.7020741671904462, |
|
"grad_norm": 2.3321869373321533, |
|
"learning_rate": 9.18544731263145e-05, |
|
"loss": 1.8859, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.7027027027027027, |
|
"grad_norm": 9.674154281616211, |
|
"learning_rate": 9.160071089980551e-05, |
|
"loss": 1.7966, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.7033312382149591, |
|
"grad_norm": 2.8337531089782715, |
|
"learning_rate": 9.134710123691736e-05, |
|
"loss": 1.8802, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.7039597737272156, |
|
"grad_norm": 2.6572537422180176, |
|
"learning_rate": 9.109364624053619e-05, |
|
"loss": 1.7863, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.7039597737272156, |
|
"eval_loss": 0.9924196004867554, |
|
"eval_runtime": 1570.1427, |
|
"eval_samples_per_second": 1.642, |
|
"eval_steps_per_second": 1.642, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.704588309239472, |
|
"grad_norm": 2.6539511680603027, |
|
"learning_rate": 9.084034801226544e-05, |
|
"loss": 1.5892, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.7052168447517285, |
|
"grad_norm": 3.4863860607147217, |
|
"learning_rate": 9.05872086524089e-05, |
|
"loss": 1.4334, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.7058453802639849, |
|
"grad_norm": 2.361863613128662, |
|
"learning_rate": 9.0334230259953e-05, |
|
"loss": 1.3019, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.7064739157762414, |
|
"grad_norm": 1.9502878189086914, |
|
"learning_rate": 9.008141493254926e-05, |
|
"loss": 1.9009, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.7071024512884978, |
|
"grad_norm": 2.575131416320801, |
|
"learning_rate": 8.982876476649744e-05, |
|
"loss": 1.3239, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.7077309868007542, |
|
"grad_norm": 1.992423415184021, |
|
"learning_rate": 8.957628185672744e-05, |
|
"loss": 1.581, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.7083595223130107, |
|
"grad_norm": 2.4691596031188965, |
|
"learning_rate": 8.932396829678262e-05, |
|
"loss": 1.598, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.7089880578252671, |
|
"grad_norm": 5.816030025482178, |
|
"learning_rate": 8.907182617880187e-05, |
|
"loss": 1.3336, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.7096165933375236, |
|
"grad_norm": 2.062119483947754, |
|
"learning_rate": 8.881985759350269e-05, |
|
"loss": 1.4643, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.71024512884978, |
|
"grad_norm": 7.324100494384766, |
|
"learning_rate": 8.856806463016359e-05, |
|
"loss": 1.4673, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.7108736643620365, |
|
"grad_norm": 23.390262603759766, |
|
"learning_rate": 8.831644937660686e-05, |
|
"loss": 1.3248, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.7115021998742929, |
|
"grad_norm": 2.4310269355773926, |
|
"learning_rate": 8.80650139191813e-05, |
|
"loss": 1.2846, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.7121307353865494, |
|
"grad_norm": 2.6896767616271973, |
|
"learning_rate": 8.781376034274483e-05, |
|
"loss": 1.204, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.7127592708988058, |
|
"grad_norm": 2.0934154987335205, |
|
"learning_rate": 8.756269073064724e-05, |
|
"loss": 1.9249, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.7133878064110623, |
|
"grad_norm": 2.8560831546783447, |
|
"learning_rate": 8.731180716471296e-05, |
|
"loss": 2.0294, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.7140163419233186, |
|
"grad_norm": 2.889394521713257, |
|
"learning_rate": 8.706111172522373e-05, |
|
"loss": 1.5512, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.7146448774355751, |
|
"grad_norm": 2.4919700622558594, |
|
"learning_rate": 8.681060649090134e-05, |
|
"loss": 1.4552, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.7152734129478315, |
|
"grad_norm": 2.7027668952941895, |
|
"learning_rate": 8.656029353889055e-05, |
|
"loss": 1.3872, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.715901948460088, |
|
"grad_norm": 5.530666828155518, |
|
"learning_rate": 8.631017494474163e-05, |
|
"loss": 1.548, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.7165304839723444, |
|
"grad_norm": 2.3786208629608154, |
|
"learning_rate": 8.60602527823933e-05, |
|
"loss": 1.6342, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.7171590194846009, |
|
"grad_norm": 2.374307155609131, |
|
"learning_rate": 8.581052912415558e-05, |
|
"loss": 1.5062, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.7177875549968573, |
|
"grad_norm": 2.3763246536254883, |
|
"learning_rate": 8.556100604069238e-05, |
|
"loss": 1.8817, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.7184160905091138, |
|
"grad_norm": 2.234683036804199, |
|
"learning_rate": 8.531168560100469e-05, |
|
"loss": 1.4443, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.7190446260213702, |
|
"grad_norm": 2.030888795852661, |
|
"learning_rate": 8.506256987241295e-05, |
|
"loss": 1.6862, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.7196731615336267, |
|
"grad_norm": 9.853904724121094, |
|
"learning_rate": 8.481366092054044e-05, |
|
"loss": 1.2503, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.7203016970458831, |
|
"grad_norm": 2.712641954421997, |
|
"learning_rate": 8.456496080929564e-05, |
|
"loss": 1.1959, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.7209302325581395, |
|
"grad_norm": 20.679536819458008, |
|
"learning_rate": 8.43164716008556e-05, |
|
"loss": 1.382, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.721558768070396, |
|
"grad_norm": 2.233064889907837, |
|
"learning_rate": 8.406819535564833e-05, |
|
"loss": 1.2746, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.7221873035826524, |
|
"grad_norm": 3.22188138961792, |
|
"learning_rate": 8.38201341323363e-05, |
|
"loss": 1.7657, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.7228158390949089, |
|
"grad_norm": 7.27728796005249, |
|
"learning_rate": 8.357228998779878e-05, |
|
"loss": 1.5188, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.7234443746071653, |
|
"grad_norm": 2.5036487579345703, |
|
"learning_rate": 8.332466497711525e-05, |
|
"loss": 1.6792, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.7240729101194218, |
|
"grad_norm": 2.007939577102661, |
|
"learning_rate": 8.307726115354804e-05, |
|
"loss": 1.841, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.7247014456316782, |
|
"grad_norm": 2.1606719493865967, |
|
"learning_rate": 8.283008056852548e-05, |
|
"loss": 1.8613, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.7253299811439347, |
|
"grad_norm": 7.078469753265381, |
|
"learning_rate": 8.258312527162495e-05, |
|
"loss": 1.3817, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.725958516656191, |
|
"grad_norm": 2.4328041076660156, |
|
"learning_rate": 8.233639731055558e-05, |
|
"loss": 1.9647, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.7265870521684475, |
|
"grad_norm": 2.694363594055176, |
|
"learning_rate": 8.208989873114165e-05, |
|
"loss": 1.9919, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.7272155876807039, |
|
"grad_norm": 2.6112589836120605, |
|
"learning_rate": 8.184363157730534e-05, |
|
"loss": 1.7735, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.7278441231929604, |
|
"grad_norm": 2.0942158699035645, |
|
"learning_rate": 8.159759789104997e-05, |
|
"loss": 1.5228, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.7284726587052168, |
|
"grad_norm": 2.0683867931365967, |
|
"learning_rate": 8.135179971244293e-05, |
|
"loss": 1.8875, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.7291011942174733, |
|
"grad_norm": 2.672105073928833, |
|
"learning_rate": 8.110623907959882e-05, |
|
"loss": 1.6535, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.7297297297297297, |
|
"grad_norm": 2.4173474311828613, |
|
"learning_rate": 8.086091802866262e-05, |
|
"loss": 1.5208, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.7303582652419862, |
|
"grad_norm": 2.0090785026550293, |
|
"learning_rate": 8.061583859379262e-05, |
|
"loss": 1.3517, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.7309868007542426, |
|
"grad_norm": 6.630021572113037, |
|
"learning_rate": 8.037100280714385e-05, |
|
"loss": 1.4237, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.7316153362664991, |
|
"grad_norm": 2.837871551513672, |
|
"learning_rate": 8.012641269885084e-05, |
|
"loss": 1.3769, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.7322438717787555, |
|
"grad_norm": 2.187551975250244, |
|
"learning_rate": 7.98820702970112e-05, |
|
"loss": 1.6888, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.732872407291012, |
|
"grad_norm": 2.1596717834472656, |
|
"learning_rate": 7.963797762766843e-05, |
|
"loss": 1.8048, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.7335009428032684, |
|
"grad_norm": 2.124009609222412, |
|
"learning_rate": 7.939413671479539e-05, |
|
"loss": 1.4541, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.7341294783155248, |
|
"grad_norm": 1.9929805994033813, |
|
"learning_rate": 7.915054958027747e-05, |
|
"loss": 2.0854, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.7347580138277813, |
|
"grad_norm": 5.274991512298584, |
|
"learning_rate": 7.890721824389565e-05, |
|
"loss": 1.6897, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.7353865493400377, |
|
"grad_norm": 2.200057029724121, |
|
"learning_rate": 7.866414472330995e-05, |
|
"loss": 1.928, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.7360150848522942, |
|
"grad_norm": 2.515014886856079, |
|
"learning_rate": 7.842133103404259e-05, |
|
"loss": 1.9697, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.7366436203645506, |
|
"grad_norm": 2.8547728061676025, |
|
"learning_rate": 7.817877918946138e-05, |
|
"loss": 1.3425, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.7372721558768071, |
|
"grad_norm": 5.4561028480529785, |
|
"learning_rate": 7.793649120076289e-05, |
|
"loss": 1.3061, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.7379006913890634, |
|
"grad_norm": 3.0161116123199463, |
|
"learning_rate": 7.769446907695587e-05, |
|
"loss": 1.7987, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.73852922690132, |
|
"grad_norm": 3.6751816272735596, |
|
"learning_rate": 7.745271482484457e-05, |
|
"loss": 1.6158, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.7391577624135763, |
|
"grad_norm": 5.799480438232422, |
|
"learning_rate": 7.721123044901213e-05, |
|
"loss": 1.5263, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.7397862979258328, |
|
"grad_norm": 2.4725875854492188, |
|
"learning_rate": 7.697001795180382e-05, |
|
"loss": 1.954, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.7404148334380892, |
|
"grad_norm": 3.724419593811035, |
|
"learning_rate": 7.672907933331068e-05, |
|
"loss": 1.7243, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.7410433689503457, |
|
"grad_norm": 2.5977113246917725, |
|
"learning_rate": 7.648841659135271e-05, |
|
"loss": 1.6016, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.7416719044626021, |
|
"grad_norm": 2.518479824066162, |
|
"learning_rate": 7.624803172146235e-05, |
|
"loss": 1.5951, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.7423004399748586, |
|
"grad_norm": 1.9670120477676392, |
|
"learning_rate": 7.600792671686811e-05, |
|
"loss": 1.9695, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.742928975487115, |
|
"grad_norm": 2.2110965251922607, |
|
"learning_rate": 7.576810356847777e-05, |
|
"loss": 1.4995, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.7435575109993715, |
|
"grad_norm": 2.224567413330078, |
|
"learning_rate": 7.55285642648621e-05, |
|
"loss": 1.6483, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.7441860465116279, |
|
"grad_norm": 2.1712052822113037, |
|
"learning_rate": 7.528931079223822e-05, |
|
"loss": 2.0525, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.7448145820238844, |
|
"grad_norm": 3.4237821102142334, |
|
"learning_rate": 7.505034513445321e-05, |
|
"loss": 1.3626, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.7454431175361408, |
|
"grad_norm": 2.2351598739624023, |
|
"learning_rate": 7.481166927296772e-05, |
|
"loss": 1.7577, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.7460716530483973, |
|
"grad_norm": 1.903164267539978, |
|
"learning_rate": 7.45732851868393e-05, |
|
"loss": 1.5872, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.7467001885606537, |
|
"grad_norm": 2.4990687370300293, |
|
"learning_rate": 7.433519485270633e-05, |
|
"loss": 1.4958, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.7473287240729101, |
|
"grad_norm": 2.5348727703094482, |
|
"learning_rate": 7.409740024477125e-05, |
|
"loss": 1.3709, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.7479572595851666, |
|
"grad_norm": 2.402555465698242, |
|
"learning_rate": 7.38599033347846e-05, |
|
"loss": 1.4448, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.748585795097423, |
|
"grad_norm": 2.4035086631774902, |
|
"learning_rate": 7.362270609202828e-05, |
|
"loss": 1.6504, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.7492143306096795, |
|
"grad_norm": 2.22576642036438, |
|
"learning_rate": 7.33858104832996e-05, |
|
"loss": 1.7383, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.7498428661219358, |
|
"grad_norm": 3.985947847366333, |
|
"learning_rate": 7.314921847289453e-05, |
|
"loss": 1.3207, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.7504714016341923, |
|
"grad_norm": 2.280792713165283, |
|
"learning_rate": 7.291293202259186e-05, |
|
"loss": 1.7222, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.7510999371464487, |
|
"grad_norm": 2.2812905311584473, |
|
"learning_rate": 7.267695309163664e-05, |
|
"loss": 1.579, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.7517284726587052, |
|
"grad_norm": 1.9620565176010132, |
|
"learning_rate": 7.244128363672407e-05, |
|
"loss": 1.9024, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.7523570081709616, |
|
"grad_norm": 6.212587833404541, |
|
"learning_rate": 7.220592561198321e-05, |
|
"loss": 1.5557, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.7529855436832181, |
|
"grad_norm": 2.4118034839630127, |
|
"learning_rate": 7.197088096896067e-05, |
|
"loss": 1.4873, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.7536140791954745, |
|
"grad_norm": 2.808126211166382, |
|
"learning_rate": 7.173615165660483e-05, |
|
"loss": 1.644, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.754242614707731, |
|
"grad_norm": 2.902484893798828, |
|
"learning_rate": 7.150173962124915e-05, |
|
"loss": 1.2501, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.7548711502199874, |
|
"grad_norm": 1.9078620672225952, |
|
"learning_rate": 7.126764680659645e-05, |
|
"loss": 1.6844, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.7554996857322439, |
|
"grad_norm": 2.9469006061553955, |
|
"learning_rate": 7.103387515370247e-05, |
|
"loss": 1.6734, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.7561282212445003, |
|
"grad_norm": 3.9854254722595215, |
|
"learning_rate": 7.08004266009601e-05, |
|
"loss": 1.9626, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.7567567567567568, |
|
"grad_norm": 2.8919150829315186, |
|
"learning_rate": 7.056730308408308e-05, |
|
"loss": 1.6724, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.7573852922690132, |
|
"grad_norm": 2.811168670654297, |
|
"learning_rate": 7.033450653608995e-05, |
|
"loss": 1.9378, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.7580138277812697, |
|
"grad_norm": 2.1751866340637207, |
|
"learning_rate": 7.010203888728813e-05, |
|
"loss": 1.9443, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.7586423632935261, |
|
"grad_norm": 2.517366886138916, |
|
"learning_rate": 6.98699020652579e-05, |
|
"loss": 1.7793, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.7592708988057826, |
|
"grad_norm": 2.7467236518859863, |
|
"learning_rate": 6.963809799483637e-05, |
|
"loss": 1.2431, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.759899434318039, |
|
"grad_norm": 2.0183451175689697, |
|
"learning_rate": 6.940662859810143e-05, |
|
"loss": 1.8033, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.7605279698302954, |
|
"grad_norm": 2.6614110469818115, |
|
"learning_rate": 6.917549579435606e-05, |
|
"loss": 1.5114, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.7611565053425519, |
|
"grad_norm": 2.9125242233276367, |
|
"learning_rate": 6.894470150011221e-05, |
|
"loss": 1.7623, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.7617850408548082, |
|
"grad_norm": 2.589367151260376, |
|
"learning_rate": 6.871424762907497e-05, |
|
"loss": 1.3241, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.7624135763670647, |
|
"grad_norm": 9.632402420043945, |
|
"learning_rate": 6.84841360921267e-05, |
|
"loss": 1.4107, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.7630421118793211, |
|
"grad_norm": 9.185904502868652, |
|
"learning_rate": 6.825436879731116e-05, |
|
"loss": 1.498, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.7636706473915776, |
|
"grad_norm": 5.054088115692139, |
|
"learning_rate": 6.802494764981775e-05, |
|
"loss": 1.9881, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.764299182903834, |
|
"grad_norm": 1.954275131225586, |
|
"learning_rate": 6.77958745519657e-05, |
|
"loss": 1.546, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.7649277184160905, |
|
"grad_norm": 2.1610217094421387, |
|
"learning_rate": 6.756715140318826e-05, |
|
"loss": 1.705, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.7655562539283469, |
|
"grad_norm": 2.9864962100982666, |
|
"learning_rate": 6.733878010001689e-05, |
|
"loss": 1.8802, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.7661847894406034, |
|
"grad_norm": 45.979530334472656, |
|
"learning_rate": 6.711076253606572e-05, |
|
"loss": 1.9702, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.7668133249528598, |
|
"grad_norm": 2.3461523056030273, |
|
"learning_rate": 6.688310060201567e-05, |
|
"loss": 1.8765, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.7674418604651163, |
|
"grad_norm": 2.2091665267944336, |
|
"learning_rate": 6.665579618559885e-05, |
|
"loss": 1.5466, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.7680703959773727, |
|
"grad_norm": 2.7552435398101807, |
|
"learning_rate": 6.642885117158296e-05, |
|
"loss": 1.3857, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.7686989314896292, |
|
"grad_norm": 2.136584520339966, |
|
"learning_rate": 6.620226744175544e-05, |
|
"loss": 1.7707, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.7693274670018856, |
|
"grad_norm": 2.05024790763855, |
|
"learning_rate": 6.597604687490826e-05, |
|
"loss": 1.6121, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.7699560025141421, |
|
"grad_norm": 2.4084768295288086, |
|
"learning_rate": 6.575019134682188e-05, |
|
"loss": 1.8298, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.7705845380263985, |
|
"grad_norm": 6.111176490783691, |
|
"learning_rate": 6.552470273025005e-05, |
|
"loss": 1.7795, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.771213073538655, |
|
"grad_norm": 2.213745594024658, |
|
"learning_rate": 6.529958289490414e-05, |
|
"loss": 1.5378, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.7718416090509114, |
|
"grad_norm": 2.1072516441345215, |
|
"learning_rate": 6.507483370743766e-05, |
|
"loss": 1.8805, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.7724701445631679, |
|
"grad_norm": 2.7547993659973145, |
|
"learning_rate": 6.485045703143072e-05, |
|
"loss": 1.6539, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.7730986800754243, |
|
"grad_norm": 5.024652004241943, |
|
"learning_rate": 6.462645472737465e-05, |
|
"loss": 1.6199, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.7737272155876807, |
|
"grad_norm": 2.515493869781494, |
|
"learning_rate": 6.44028286526566e-05, |
|
"loss": 1.9624, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.7743557510999372, |
|
"grad_norm": 2.165681838989258, |
|
"learning_rate": 6.417958066154404e-05, |
|
"loss": 1.8711, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.7749842866121935, |
|
"grad_norm": 2.589289903640747, |
|
"learning_rate": 6.395671260516953e-05, |
|
"loss": 1.5007, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.77561282212445, |
|
"grad_norm": 2.37497615814209, |
|
"learning_rate": 6.373422633151512e-05, |
|
"loss": 1.3228, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.7762413576367064, |
|
"grad_norm": 2.4158599376678467, |
|
"learning_rate": 6.351212368539732e-05, |
|
"loss": 1.7429, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.7768698931489629, |
|
"grad_norm": 2.458102226257324, |
|
"learning_rate": 6.329040650845161e-05, |
|
"loss": 1.6967, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.7774984286612193, |
|
"grad_norm": 2.7537758350372314, |
|
"learning_rate": 6.30690766391173e-05, |
|
"loss": 2.0286, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.7781269641734758, |
|
"grad_norm": 2.001431941986084, |
|
"learning_rate": 6.284813591262206e-05, |
|
"loss": 1.5409, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.7787554996857322, |
|
"grad_norm": 3.236192464828491, |
|
"learning_rate": 6.262758616096698e-05, |
|
"loss": 1.5151, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.7793840351979887, |
|
"grad_norm": 3.050147771835327, |
|
"learning_rate": 6.240742921291133e-05, |
|
"loss": 1.5089, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7800125707102451, |
|
"grad_norm": 2.5074894428253174, |
|
"learning_rate": 6.218766689395712e-05, |
|
"loss": 1.5258, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.7806411062225016, |
|
"grad_norm": 2.85343599319458, |
|
"learning_rate": 6.196830102633433e-05, |
|
"loss": 1.6458, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.781269641734758, |
|
"grad_norm": 2.2203330993652344, |
|
"learning_rate": 6.174933342898549e-05, |
|
"loss": 1.838, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.7818981772470145, |
|
"grad_norm": 2.332676649093628, |
|
"learning_rate": 6.153076591755094e-05, |
|
"loss": 1.056, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.7825267127592709, |
|
"grad_norm": 2.2264132499694824, |
|
"learning_rate": 6.131260030435341e-05, |
|
"loss": 1.6193, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.7831552482715274, |
|
"grad_norm": 7.7264790534973145, |
|
"learning_rate": 6.109483839838324e-05, |
|
"loss": 1.5639, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.7837837837837838, |
|
"grad_norm": 2.1555936336517334, |
|
"learning_rate": 6.087748200528323e-05, |
|
"loss": 1.946, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.7844123192960403, |
|
"grad_norm": 4.1040449142456055, |
|
"learning_rate": 6.0660532927333846e-05, |
|
"loss": 1.5472, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.7850408548082967, |
|
"grad_norm": 3.6039717197418213, |
|
"learning_rate": 6.044399296343817e-05, |
|
"loss": 1.3412, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.7856693903205532, |
|
"grad_norm": 2.8398213386535645, |
|
"learning_rate": 6.022786390910679e-05, |
|
"loss": 1.3869, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.7862979258328096, |
|
"grad_norm": 2.711820602416992, |
|
"learning_rate": 6.001214755644334e-05, |
|
"loss": 1.507, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.7869264613450659, |
|
"grad_norm": 2.033348560333252, |
|
"learning_rate": 5.97968456941293e-05, |
|
"loss": 1.7967, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.7875549968573224, |
|
"grad_norm": 3.0147345066070557, |
|
"learning_rate": 5.9581960107409284e-05, |
|
"loss": 1.5409, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.7881835323695788, |
|
"grad_norm": 3.2906253337860107, |
|
"learning_rate": 5.9367492578076177e-05, |
|
"loss": 1.4774, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.7888120678818353, |
|
"grad_norm": 2.165283441543579, |
|
"learning_rate": 5.915344488445649e-05, |
|
"loss": 1.7004, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.7894406033940917, |
|
"grad_norm": 3.209404230117798, |
|
"learning_rate": 5.8939818801395466e-05, |
|
"loss": 1.4982, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.7900691389063482, |
|
"grad_norm": 3.1273396015167236, |
|
"learning_rate": 5.872661610024246e-05, |
|
"loss": 1.6924, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.7906976744186046, |
|
"grad_norm": 11.083939552307129, |
|
"learning_rate": 5.8513838548836244e-05, |
|
"loss": 1.5158, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.7913262099308611, |
|
"grad_norm": 4.628045558929443, |
|
"learning_rate": 5.830148791149022e-05, |
|
"loss": 1.7336, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.7919547454431175, |
|
"grad_norm": 4.586468696594238, |
|
"learning_rate": 5.808956594897803e-05, |
|
"loss": 1.1037, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.792583280955374, |
|
"grad_norm": 2.308030366897583, |
|
"learning_rate": 5.7878074418518735e-05, |
|
"loss": 1.635, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.7932118164676304, |
|
"grad_norm": 2.361595630645752, |
|
"learning_rate": 5.766701507376239e-05, |
|
"loss": 1.7493, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.7938403519798869, |
|
"grad_norm": 1.6703816652297974, |
|
"learning_rate": 5.7456389664775334e-05, |
|
"loss": 1.9031, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.7944688874921433, |
|
"grad_norm": 2.3605315685272217, |
|
"learning_rate": 5.7246199938025866e-05, |
|
"loss": 1.7527, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.7950974230043998, |
|
"grad_norm": 2.820552110671997, |
|
"learning_rate": 5.703644763636979e-05, |
|
"loss": 1.6964, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.7957259585166562, |
|
"grad_norm": 7.11862850189209, |
|
"learning_rate": 5.682713449903564e-05, |
|
"loss": 1.472, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.7963544940289127, |
|
"grad_norm": 2.269716739654541, |
|
"learning_rate": 5.661826226161062e-05, |
|
"loss": 1.4142, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.7969830295411691, |
|
"grad_norm": 8.061915397644043, |
|
"learning_rate": 5.640983265602604e-05, |
|
"loss": 1.6204, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.7976115650534256, |
|
"grad_norm": 2.36787748336792, |
|
"learning_rate": 5.6201847410543015e-05, |
|
"loss": 1.4546, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.798240100565682, |
|
"grad_norm": 2.092968702316284, |
|
"learning_rate": 5.5994308249738004e-05, |
|
"loss": 1.5161, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.7988686360779385, |
|
"grad_norm": 2.0657472610473633, |
|
"learning_rate": 5.578721689448872e-05, |
|
"loss": 1.4564, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.7994971715901948, |
|
"grad_norm": 2.8852896690368652, |
|
"learning_rate": 5.558057506195973e-05, |
|
"loss": 1.2178, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.8001257071024512, |
|
"grad_norm": 3.4546632766723633, |
|
"learning_rate": 5.537438446558818e-05, |
|
"loss": 1.4585, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.8007542426147077, |
|
"grad_norm": 2.654740810394287, |
|
"learning_rate": 5.516864681506977e-05, |
|
"loss": 1.861, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.8013827781269641, |
|
"grad_norm": 2.5231709480285645, |
|
"learning_rate": 5.496336381634428e-05, |
|
"loss": 1.2572, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.8020113136392206, |
|
"grad_norm": 3.1098735332489014, |
|
"learning_rate": 5.4758537171581745e-05, |
|
"loss": 1.6418, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.802639849151477, |
|
"grad_norm": 2.955645799636841, |
|
"learning_rate": 5.455416857916811e-05, |
|
"loss": 1.5669, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.8032683846637335, |
|
"grad_norm": 2.7114450931549072, |
|
"learning_rate": 5.435025973369132e-05, |
|
"loss": 1.4713, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.8038969201759899, |
|
"grad_norm": 1.9256415367126465, |
|
"learning_rate": 5.4146812325927044e-05, |
|
"loss": 2.0088, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.8045254556882464, |
|
"grad_norm": 2.9292759895324707, |
|
"learning_rate": 5.3943828042824915e-05, |
|
"loss": 1.4572, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.8045254556882464, |
|
"eval_loss": 0.9861343502998352, |
|
"eval_runtime": 1571.9717, |
|
"eval_samples_per_second": 1.64, |
|
"eval_steps_per_second": 1.64, |
|
"step": 1280 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1591, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 160, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.6368396617352479e+19, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |