{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.975794251134644,
  "eval_steps": 500,
  "global_step": 1320,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0045385779122541605,
      "grad_norm": 12.459190368652344,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 4.6369,
      "step": 1
    },
    {
      "epoch": 0.009077155824508321,
      "grad_norm": 12.756917953491211,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 4.6738,
      "step": 2
    },
    {
      "epoch": 0.01361573373676248,
      "grad_norm": 15.50593090057373,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 4.6681,
      "step": 3
    },
    {
      "epoch": 0.018154311649016642,
      "grad_norm": 12.976861000061035,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 4.246,
      "step": 4
    },
    {
      "epoch": 0.0226928895612708,
      "grad_norm": 14.594775199890137,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 4.4452,
      "step": 5
    },
    {
      "epoch": 0.02723146747352496,
      "grad_norm": 16.087888717651367,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 4.5528,
      "step": 6
    },
    {
      "epoch": 0.03177004538577912,
      "grad_norm": 13.62125301361084,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 4.5628,
      "step": 7
    },
    {
      "epoch": 0.036308623298033284,
      "grad_norm": 17.010616302490234,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 4.5768,
      "step": 8
    },
    {
      "epoch": 0.04084720121028744,
      "grad_norm": 14.567586898803711,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 4.3937,
      "step": 9
    },
    {
      "epoch": 0.0453857791225416,
      "grad_norm": 15.238479614257812,
      "learning_rate": 5.000000000000001e-07,
      "loss": 4.495,
      "step": 10
    },
    {
      "epoch": 0.049924357034795766,
      "grad_norm": 13.89387321472168,
      "learning_rate": 5.5e-07,
      "loss": 4.4705,
      "step": 11
    },
    {
      "epoch": 0.05446293494704992,
      "grad_norm": 13.949710845947266,
      "learning_rate": 6.000000000000001e-07,
      "loss": 4.328,
      "step": 12
    },
    {
      "epoch": 0.059001512859304085,
      "grad_norm": 13.15122127532959,
      "learning_rate": 6.5e-07,
      "loss": 4.4296,
      "step": 13
    },
    {
      "epoch": 0.06354009077155824,
      "grad_norm": 14.076905250549316,
      "learning_rate": 7.000000000000001e-07,
      "loss": 4.5285,
      "step": 14
    },
    {
      "epoch": 0.0680786686838124,
      "grad_norm": 14.299891471862793,
      "learning_rate": 7.5e-07,
      "loss": 4.5849,
      "step": 15
    },
    {
      "epoch": 0.07261724659606657,
      "grad_norm": 13.573644638061523,
      "learning_rate": 8.000000000000001e-07,
      "loss": 4.3638,
      "step": 16
    },
    {
      "epoch": 0.07715582450832073,
      "grad_norm": 14.140484809875488,
      "learning_rate": 8.500000000000001e-07,
      "loss": 4.4449,
      "step": 17
    },
    {
      "epoch": 0.08169440242057488,
      "grad_norm": 11.941351890563965,
      "learning_rate": 9.000000000000001e-07,
      "loss": 4.3646,
      "step": 18
    },
    {
      "epoch": 0.08623298033282904,
      "grad_norm": 10.630327224731445,
      "learning_rate": 9.500000000000001e-07,
      "loss": 4.2902,
      "step": 19
    },
    {
      "epoch": 0.0907715582450832,
      "grad_norm": 11.662637710571289,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 4.1961,
      "step": 20
    },
    {
      "epoch": 0.09531013615733737,
      "grad_norm": 12.008113861083984,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 4.1941,
      "step": 21
    },
    {
      "epoch": 0.09984871406959153,
      "grad_norm": 12.686023712158203,
      "learning_rate": 1.1e-06,
      "loss": 4.1346,
      "step": 22
    },
    {
      "epoch": 0.1043872919818457,
      "grad_norm": 12.529243469238281,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 4.1137,
      "step": 23
    },
    {
      "epoch": 0.10892586989409984,
      "grad_norm": 11.19096565246582,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 4.0772,
      "step": 24
    },
    {
      "epoch": 0.11346444780635401,
      "grad_norm": 11.182024955749512,
      "learning_rate": 1.25e-06,
      "loss": 3.8888,
      "step": 25
    },
    {
      "epoch": 0.11800302571860817,
      "grad_norm": 9.899381637573242,
      "learning_rate": 1.3e-06,
      "loss": 4.0117,
      "step": 26
    },
    {
      "epoch": 0.12254160363086233,
      "grad_norm": 9.448798179626465,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 3.8384,
      "step": 27
    },
    {
      "epoch": 0.12708018154311648,
      "grad_norm": 10.632583618164062,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 3.7857,
      "step": 28
    },
    {
      "epoch": 0.13161875945537066,
      "grad_norm": 10.56238079071045,
      "learning_rate": 1.45e-06,
      "loss": 3.4783,
      "step": 29
    },
    {
      "epoch": 0.1361573373676248,
      "grad_norm": 12.410117149353027,
      "learning_rate": 1.5e-06,
      "loss": 3.4828,
      "step": 30
    },
    {
      "epoch": 0.14069591527987896,
      "grad_norm": 10.183599472045898,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 3.6337,
      "step": 31
    },
    {
      "epoch": 0.14523449319213314,
      "grad_norm": 9.421585083007812,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 3.4453,
      "step": 32
    },
    {
      "epoch": 0.14977307110438728,
      "grad_norm": 9.230025291442871,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 3.3481,
      "step": 33
    },
    {
      "epoch": 0.15431164901664146,
      "grad_norm": 8.295567512512207,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 3.3145,
      "step": 34
    },
    {
      "epoch": 0.1588502269288956,
      "grad_norm": 9.138203620910645,
      "learning_rate": 1.75e-06,
      "loss": 3.1872,
      "step": 35
    },
    {
      "epoch": 0.16338880484114976,
      "grad_norm": 11.864872932434082,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 2.9837,
      "step": 36
    },
    {
      "epoch": 0.16792738275340394,
      "grad_norm": 12.373150825500488,
      "learning_rate": 1.85e-06,
      "loss": 3.0049,
      "step": 37
    },
    {
      "epoch": 0.17246596066565809,
      "grad_norm": 21.665483474731445,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 2.7664,
      "step": 38
    },
    {
      "epoch": 0.17700453857791226,
      "grad_norm": 22.663740158081055,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 2.7272,
      "step": 39
    },
    {
      "epoch": 0.1815431164901664,
      "grad_norm": 26.674400329589844,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.8077,
      "step": 40
    },
    {
      "epoch": 0.18608169440242056,
      "grad_norm": 28.503612518310547,
      "learning_rate": 2.05e-06,
      "loss": 2.7306,
      "step": 41
    },
    {
      "epoch": 0.19062027231467474,
      "grad_norm": 29.03409767150879,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 2.7405,
      "step": 42
    },
    {
      "epoch": 0.1951588502269289,
      "grad_norm": 17.844894409179688,
      "learning_rate": 2.15e-06,
      "loss": 2.6464,
      "step": 43
    },
    {
      "epoch": 0.19969742813918306,
      "grad_norm": 19.220829010009766,
      "learning_rate": 2.2e-06,
      "loss": 2.4554,
      "step": 44
    },
    {
      "epoch": 0.2042360060514372,
      "grad_norm": 25.161415100097656,
      "learning_rate": 2.25e-06,
      "loss": 2.2773,
      "step": 45
    },
    {
      "epoch": 0.2087745839636914,
      "grad_norm": 17.46828269958496,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 2.3392,
      "step": 46
    },
    {
      "epoch": 0.21331316187594554,
      "grad_norm": 13.194672584533691,
      "learning_rate": 2.35e-06,
      "loss": 2.0409,
      "step": 47
    },
    {
      "epoch": 0.2178517397881997,
      "grad_norm": 7.290500164031982,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 2.1879,
      "step": 48
    },
    {
      "epoch": 0.22239031770045387,
      "grad_norm": 7.56943941116333,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 2.1656,
      "step": 49
    },
    {
      "epoch": 0.22692889561270801,
      "grad_norm": 8.335527420043945,
      "learning_rate": 2.5e-06,
      "loss": 2.1294,
      "step": 50
    },
    {
      "epoch": 0.2314674735249622,
      "grad_norm": 10.013853073120117,
      "learning_rate": 2.55e-06,
      "loss": 2.1816,
      "step": 51
    },
    {
      "epoch": 0.23600605143721634,
      "grad_norm": 8.674482345581055,
      "learning_rate": 2.6e-06,
      "loss": 2.0078,
      "step": 52
    },
    {
      "epoch": 0.2405446293494705,
      "grad_norm": 6.09174919128418,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 2.1328,
      "step": 53
    },
    {
      "epoch": 0.24508320726172467,
      "grad_norm": 7.3330488204956055,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 1.8961,
      "step": 54
    },
    {
      "epoch": 0.24962178517397882,
      "grad_norm": 8.764911651611328,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 1.7026,
      "step": 55
    },
    {
      "epoch": 0.25416036308623297,
      "grad_norm": 12.413115501403809,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.8172,
      "step": 56
    },
    {
      "epoch": 0.2586989409984871,
      "grad_norm": 9.009276390075684,
      "learning_rate": 2.85e-06,
      "loss": 1.6527,
      "step": 57
    },
    {
      "epoch": 0.2632375189107413,
      "grad_norm": 9.965579986572266,
      "learning_rate": 2.9e-06,
      "loss": 1.6348,
      "step": 58
    },
    {
      "epoch": 0.26777609682299547,
      "grad_norm": 7.091963768005371,
      "learning_rate": 2.95e-06,
      "loss": 1.7016,
      "step": 59
    },
    {
      "epoch": 0.2723146747352496,
      "grad_norm": 5.276648998260498,
      "learning_rate": 3e-06,
      "loss": 1.7088,
      "step": 60
    },
    {
      "epoch": 0.27685325264750377,
      "grad_norm": 9.634596824645996,
      "learning_rate": 3.05e-06,
      "loss": 1.4961,
      "step": 61
    },
    {
      "epoch": 0.2813918305597579,
      "grad_norm": 6.663750648498535,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 1.522,
      "step": 62
    },
    {
      "epoch": 0.2859304084720121,
      "grad_norm": 36.79426956176758,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 1.4855,
      "step": 63
    },
    {
      "epoch": 0.29046898638426627,
      "grad_norm": 31.88567352294922,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.5058,
      "step": 64
    },
    {
      "epoch": 0.2950075642965204,
      "grad_norm": 12.319960594177246,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 1.4547,
      "step": 65
    },
    {
      "epoch": 0.29954614220877457,
      "grad_norm": 4.246046543121338,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 1.3153,
      "step": 66
    },
    {
      "epoch": 0.3040847201210287,
      "grad_norm": 5.3949503898620605,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 1.4237,
      "step": 67
    },
    {
      "epoch": 0.3086232980332829,
      "grad_norm": 9.975737571716309,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 1.327,
      "step": 68
    },
    {
      "epoch": 0.31316187594553707,
      "grad_norm": 6.356144428253174,
      "learning_rate": 3.45e-06,
      "loss": 1.2685,
      "step": 69
    },
    {
      "epoch": 0.3177004538577912,
      "grad_norm": 3.9029836654663086,
      "learning_rate": 3.5e-06,
      "loss": 1.3856,
      "step": 70
    },
    {
      "epoch": 0.32223903177004537,
      "grad_norm": 4.215930938720703,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 1.1844,
      "step": 71
    },
    {
      "epoch": 0.3267776096822995,
      "grad_norm": 27.889633178710938,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 1.2535,
      "step": 72
    },
    {
      "epoch": 0.3313161875945537,
      "grad_norm": 23.77273178100586,
      "learning_rate": 3.65e-06,
      "loss": 1.3189,
      "step": 73
    },
    {
      "epoch": 0.3358547655068079,
      "grad_norm": 9.203927040100098,
      "learning_rate": 3.7e-06,
      "loss": 1.1993,
      "step": 74
    },
    {
      "epoch": 0.340393343419062,
      "grad_norm": 3.1091806888580322,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 1.1804,
      "step": 75
    },
    {
      "epoch": 0.34493192133131617,
      "grad_norm": 9.0440034866333,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 1.2318,
      "step": 76
    },
    {
      "epoch": 0.3494704992435703,
      "grad_norm": 7.17051362991333,
      "learning_rate": 3.85e-06,
      "loss": 1.2018,
      "step": 77
    },
    {
      "epoch": 0.3540090771558245,
      "grad_norm": 8.164457321166992,
      "learning_rate": 3.900000000000001e-06,
      "loss": 1.0911,
      "step": 78
    },
    {
      "epoch": 0.3585476550680787,
      "grad_norm": 3.3671042919158936,
      "learning_rate": 3.95e-06,
      "loss": 1.1747,
      "step": 79
    },
    {
      "epoch": 0.3630862329803328,
      "grad_norm": 4.223191261291504,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.1141,
      "step": 80
    },
    {
      "epoch": 0.367624810892587,
      "grad_norm": 10.657241821289062,
      "learning_rate": 4.05e-06,
      "loss": 1.0506,
      "step": 81
    },
    {
      "epoch": 0.3721633888048411,
      "grad_norm": 2.664783239364624,
      "learning_rate": 4.1e-06,
      "loss": 1.1271,
      "step": 82
    },
    {
      "epoch": 0.3767019667170953,
      "grad_norm": 5.334985733032227,
      "learning_rate": 4.15e-06,
      "loss": 1.0812,
      "step": 83
    },
    {
      "epoch": 0.3812405446293495,
      "grad_norm": 7.471070289611816,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 1.1027,
      "step": 84
    },
    {
      "epoch": 0.3857791225416036,
      "grad_norm": 3.270421028137207,
      "learning_rate": 4.25e-06,
      "loss": 1.054,
      "step": 85
    },
    {
      "epoch": 0.3903177004538578,
      "grad_norm": 1.9382085800170898,
      "learning_rate": 4.3e-06,
      "loss": 0.975,
      "step": 86
    },
    {
      "epoch": 0.394856278366112,
      "grad_norm": 24.2485408782959,
      "learning_rate": 4.350000000000001e-06,
      "loss": 1.0288,
      "step": 87
    },
    {
      "epoch": 0.39939485627836613,
      "grad_norm": 42.359432220458984,
      "learning_rate": 4.4e-06,
      "loss": 1.111,
      "step": 88
    },
    {
      "epoch": 0.4039334341906203,
      "grad_norm": 25.72220230102539,
      "learning_rate": 4.450000000000001e-06,
      "loss": 1.0472,
      "step": 89
    },
    {
      "epoch": 0.4084720121028744,
      "grad_norm": 3.1196579933166504,
      "learning_rate": 4.5e-06,
      "loss": 1.0055,
      "step": 90
    },
    {
      "epoch": 0.4130105900151286,
      "grad_norm": 4.584676742553711,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.9848,
      "step": 91
    },
    {
      "epoch": 0.4175491679273828,
      "grad_norm": 6.140016078948975,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.948,
      "step": 92
    },
    {
      "epoch": 0.42208774583963693,
      "grad_norm": 5.266363620758057,
      "learning_rate": 4.65e-06,
      "loss": 0.9261,
      "step": 93
    },
    {
      "epoch": 0.4266263237518911,
      "grad_norm": 3.6768720149993896,
      "learning_rate": 4.7e-06,
      "loss": 0.9772,
      "step": 94
    },
    {
      "epoch": 0.43116490166414523,
      "grad_norm": 8.903961181640625,
      "learning_rate": 4.75e-06,
      "loss": 0.9514,
      "step": 95
    },
    {
      "epoch": 0.4357034795763994,
      "grad_norm": 15.684305191040039,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.9629,
      "step": 96
    },
    {
      "epoch": 0.4402420574886536,
      "grad_norm": 21.329519271850586,
      "learning_rate": 4.85e-06,
      "loss": 1.0118,
      "step": 97
    },
    {
      "epoch": 0.44478063540090773,
      "grad_norm": 7.824005603790283,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.9094,
      "step": 98
    },
    {
      "epoch": 0.4493192133131619,
      "grad_norm": 2.283174514770508,
      "learning_rate": 4.95e-06,
      "loss": 0.8877,
      "step": 99
    },
    {
      "epoch": 0.45385779122541603,
      "grad_norm": 5.028634548187256,
      "learning_rate": 5e-06,
      "loss": 0.8946,
      "step": 100
    },
    {
      "epoch": 0.4583963691376702,
      "grad_norm": 6.072951793670654,
      "learning_rate": 4.9999917112344245e-06,
      "loss": 0.9248,
      "step": 101
    },
    {
      "epoch": 0.4629349470499244,
      "grad_norm": 7.111969470977783,
      "learning_rate": 4.999966844992657e-06,
      "loss": 0.9172,
      "step": 102
    },
    {
      "epoch": 0.46747352496217853,
      "grad_norm": 3.792372465133667,
      "learning_rate": 4.999925401439588e-06,
      "loss": 0.8718,
      "step": 103
    },
    {
      "epoch": 0.4720121028744327,
      "grad_norm": 4.017702579498291,
      "learning_rate": 4.999867380850031e-06,
      "loss": 0.9121,
      "step": 104
    },
    {
      "epoch": 0.47655068078668683,
      "grad_norm": 6.903019905090332,
      "learning_rate": 4.99979278360872e-06,
      "loss": 0.9359,
      "step": 105
    },
    {
      "epoch": 0.481089258698941,
      "grad_norm": 3.740607500076294,
      "learning_rate": 4.999701610210309e-06,
      "loss": 0.8474,
      "step": 106
    },
    {
      "epoch": 0.4856278366111952,
      "grad_norm": 3.379190683364868,
      "learning_rate": 4.999593861259373e-06,
      "loss": 0.8641,
      "step": 107
    },
    {
      "epoch": 0.49016641452344933,
      "grad_norm": 3.0796492099761963,
      "learning_rate": 4.999469537470394e-06,
      "loss": 0.8255,
      "step": 108
    },
    {
      "epoch": 0.4947049924357035,
      "grad_norm": 3.3708953857421875,
      "learning_rate": 4.999328639667765e-06,
      "loss": 0.8823,
      "step": 109
    },
    {
      "epoch": 0.49924357034795763,
      "grad_norm": 3.2106549739837646,
      "learning_rate": 4.999171168785783e-06,
      "loss": 0.8584,
      "step": 110
    },
    {
      "epoch": 0.5037821482602118,
      "grad_norm": 6.520877838134766,
      "learning_rate": 4.998997125868638e-06,
      "loss": 0.8718,
      "step": 111
    },
    {
      "epoch": 0.5083207261724659,
      "grad_norm": 1.0179104804992676,
      "learning_rate": 4.99880651207041e-06,
      "loss": 0.871,
      "step": 112
    },
    {
      "epoch": 0.5128593040847201,
      "grad_norm": 1.1883283853530884,
      "learning_rate": 4.998599328655063e-06,
      "loss": 0.8689,
      "step": 113
    },
    {
      "epoch": 0.5173978819969742,
      "grad_norm": 1.2722241878509521,
      "learning_rate": 4.998375576996431e-06,
      "loss": 0.8594,
      "step": 114
    },
    {
      "epoch": 0.5219364599092284,
      "grad_norm": 1.8931000232696533,
      "learning_rate": 4.9981352585782154e-06,
      "loss": 0.8327,
      "step": 115
    },
    {
      "epoch": 0.5264750378214826,
      "grad_norm": 3.093480348587036,
      "learning_rate": 4.997878374993971e-06,
      "loss": 0.8372,
      "step": 116
    },
    {
      "epoch": 0.5310136157337367,
      "grad_norm": 5.2650957107543945,
      "learning_rate": 4.9976049279470955e-06,
      "loss": 0.8386,
      "step": 117
    },
    {
      "epoch": 0.5355521936459909,
      "grad_norm": 4.467101573944092,
      "learning_rate": 4.997314919250818e-06,
      "loss": 0.7788,
      "step": 118
    },
    {
      "epoch": 0.540090771558245,
      "grad_norm": 3.614868640899658,
      "learning_rate": 4.997008350828192e-06,
      "loss": 0.874,
      "step": 119
    },
    {
      "epoch": 0.5446293494704992,
      "grad_norm": 12.209319114685059,
      "learning_rate": 4.996685224712077e-06,
      "loss": 0.8223,
      "step": 120
    },
    {
      "epoch": 0.5491679273827534,
      "grad_norm": 4.1903252601623535,
      "learning_rate": 4.9963455430451245e-06,
      "loss": 0.8455,
      "step": 121
    },
    {
      "epoch": 0.5537065052950075,
      "grad_norm": 2.649549722671509,
      "learning_rate": 4.9959893080797675e-06,
      "loss": 0.7875,
      "step": 122
    },
    {
      "epoch": 0.5582450832072617,
      "grad_norm": 4.935393810272217,
      "learning_rate": 4.995616522178207e-06,
      "loss": 0.8691,
      "step": 123
    },
    {
      "epoch": 0.5627836611195158,
      "grad_norm": 5.565356731414795,
      "learning_rate": 4.995227187812389e-06,
      "loss": 0.8267,
      "step": 124
    },
    {
      "epoch": 0.56732223903177,
      "grad_norm": 5.23801851272583,
      "learning_rate": 4.994821307563995e-06,
      "loss": 0.826,
      "step": 125
    },
    {
      "epoch": 0.5718608169440242,
      "grad_norm": 4.2979736328125,
      "learning_rate": 4.994398884124422e-06,
      "loss": 0.8068,
      "step": 126
    },
    {
      "epoch": 0.5763993948562783,
      "grad_norm": 6.285053730010986,
      "learning_rate": 4.993959920294764e-06,
      "loss": 0.819,
      "step": 127
    },
    {
      "epoch": 0.5809379727685325,
      "grad_norm": 10.453060150146484,
      "learning_rate": 4.9935044189857975e-06,
      "loss": 0.773,
      "step": 128
    },
    {
      "epoch": 0.5854765506807866,
      "grad_norm": 31.77552604675293,
      "learning_rate": 4.993032383217957e-06,
      "loss": 0.8738,
      "step": 129
    },
    {
      "epoch": 0.5900151285930408,
      "grad_norm": 1.1456351280212402,
      "learning_rate": 4.992543816121317e-06,
      "loss": 0.7518,
      "step": 130
    },
    {
      "epoch": 0.594553706505295,
      "grad_norm": 1.3343666791915894,
      "learning_rate": 4.992038720935572e-06,
      "loss": 0.8108,
      "step": 131
    },
    {
      "epoch": 0.5990922844175491,
      "grad_norm": 0.8912076354026794,
      "learning_rate": 4.991517101010015e-06,
      "loss": 0.8159,
      "step": 132
    },
    {
      "epoch": 0.6036308623298033,
      "grad_norm": 2.249366044998169,
      "learning_rate": 4.990978959803513e-06,
      "loss": 0.8124,
      "step": 133
    },
    {
      "epoch": 0.6081694402420574,
      "grad_norm": 1.2822734117507935,
      "learning_rate": 4.990424300884488e-06,
      "loss": 0.8213,
      "step": 134
    },
    {
      "epoch": 0.6127080181543116,
      "grad_norm": 6.053490161895752,
      "learning_rate": 4.98985312793089e-06,
      "loss": 0.7829,
      "step": 135
    },
    {
      "epoch": 0.6172465960665658,
      "grad_norm": 2.2606236934661865,
      "learning_rate": 4.989265444730176e-06,
      "loss": 0.7729,
      "step": 136
    },
    {
      "epoch": 0.6217851739788199,
      "grad_norm": 3.8894989490509033,
      "learning_rate": 4.988661255179276e-06,
      "loss": 0.774,
      "step": 137
    },
    {
      "epoch": 0.6263237518910741,
      "grad_norm": 5.648194313049316,
      "learning_rate": 4.988040563284582e-06,
      "loss": 0.7251,
      "step": 138
    },
    {
      "epoch": 0.6308623298033282,
      "grad_norm": 1.7583339214324951,
      "learning_rate": 4.98740337316191e-06,
      "loss": 0.7875,
      "step": 139
    },
    {
      "epoch": 0.6354009077155824,
      "grad_norm": 1.3959944248199463,
      "learning_rate": 4.9867496890364734e-06,
      "loss": 0.7726,
      "step": 140
    },
    {
      "epoch": 0.6399394856278366,
      "grad_norm": 1.7879443168640137,
      "learning_rate": 4.986079515242861e-06,
      "loss": 0.786,
      "step": 141
    },
    {
      "epoch": 0.6444780635400907,
      "grad_norm": 2.0919816493988037,
      "learning_rate": 4.985392856225003e-06,
      "loss": 0.7802,
      "step": 142
    },
    {
      "epoch": 0.649016641452345,
      "grad_norm": 1.272477626800537,
      "learning_rate": 4.984689716536145e-06,
      "loss": 0.7842,
      "step": 143
    },
    {
      "epoch": 0.653555219364599,
      "grad_norm": 1.1265331506729126,
      "learning_rate": 4.983970100838814e-06,
      "loss": 0.736,
      "step": 144
    },
    {
      "epoch": 0.6580937972768532,
      "grad_norm": 0.8514362573623657,
      "learning_rate": 4.983234013904791e-06,
      "loss": 0.749,
      "step": 145
    },
    {
      "epoch": 0.6626323751891074,
      "grad_norm": 3.148453950881958,
      "learning_rate": 4.9824814606150774e-06,
      "loss": 0.7884,
      "step": 146
    },
    {
      "epoch": 0.6671709531013615,
      "grad_norm": 1.531554937362671,
      "learning_rate": 4.981712445959864e-06,
      "loss": 0.767,
      "step": 147
    },
    {
      "epoch": 0.6717095310136157,
      "grad_norm": 1.7979401350021362,
      "learning_rate": 4.980926975038496e-06,
      "loss": 0.7575,
      "step": 148
    },
    {
      "epoch": 0.6762481089258698,
      "grad_norm": 1.1131621599197388,
      "learning_rate": 4.9801250530594415e-06,
      "loss": 0.76,
      "step": 149
    },
    {
      "epoch": 0.680786686838124,
      "grad_norm": 1.2112400531768799,
      "learning_rate": 4.9793066853402535e-06,
      "loss": 0.769,
      "step": 150
    },
    {
      "epoch": 0.6853252647503782,
      "grad_norm": 1.276172161102295,
      "learning_rate": 4.978471877307541e-06,
      "loss": 0.7641,
      "step": 151
    },
    {
      "epoch": 0.6898638426626323,
      "grad_norm": 7.614717960357666,
      "learning_rate": 4.977620634496926e-06,
      "loss": 0.7614,
      "step": 152
    },
    {
      "epoch": 0.6944024205748865,
      "grad_norm": 0.9541272521018982,
      "learning_rate": 4.976752962553008e-06,
      "loss": 0.7406,
      "step": 153
    },
    {
      "epoch": 0.6989409984871406,
      "grad_norm": 0.9793027639389038,
      "learning_rate": 4.975868867229332e-06,
      "loss": 0.7538,
      "step": 154
    },
    {
      "epoch": 0.7034795763993948,
      "grad_norm": 8.540267944335938,
      "learning_rate": 4.974968354388346e-06,
      "loss": 0.7616,
      "step": 155
    },
    {
      "epoch": 0.708018154311649,
      "grad_norm": 2.279240608215332,
      "learning_rate": 4.97405143000136e-06,
      "loss": 0.7157,
      "step": 156
    },
    {
      "epoch": 0.7125567322239031,
      "grad_norm": 2.947227716445923,
      "learning_rate": 4.973118100148513e-06,
      "loss": 0.7348,
      "step": 157
    },
    {
      "epoch": 0.7170953101361573,
      "grad_norm": 2.211785316467285,
      "learning_rate": 4.9721683710187255e-06,
      "loss": 0.7144,
      "step": 158
    },
    {
      "epoch": 0.7216338880484114,
      "grad_norm": 1.3755372762680054,
      "learning_rate": 4.971202248909662e-06,
      "loss": 0.6857,
      "step": 159
    },
    {
      "epoch": 0.7261724659606656,
      "grad_norm": 2.564708709716797,
      "learning_rate": 4.970219740227693e-06,
      "loss": 0.7124,
      "step": 160
    },
    {
      "epoch": 0.7307110438729199,
      "grad_norm": 11.76566219329834,
      "learning_rate": 4.9692208514878445e-06,
      "loss": 0.7815,
      "step": 161
    },
    {
      "epoch": 0.735249621785174,
      "grad_norm": 3.7665200233459473,
      "learning_rate": 4.9682055893137605e-06,
      "loss": 0.7021,
      "step": 162
    },
    {
      "epoch": 0.7397881996974282,
      "grad_norm": 2.9076344966888428,
      "learning_rate": 4.967173960437657e-06,
      "loss": 0.7083,
      "step": 163
    },
    {
      "epoch": 0.7443267776096822,
      "grad_norm": 2.336026191711426,
      "learning_rate": 4.966125971700277e-06,
      "loss": 0.7455,
      "step": 164
    },
    {
      "epoch": 0.7488653555219364,
      "grad_norm": 6.48813009262085,
      "learning_rate": 4.965061630050848e-06,
      "loss": 0.7628,
      "step": 165
    },
    {
      "epoch": 0.7534039334341907,
      "grad_norm": 1.9123872518539429,
      "learning_rate": 4.9639809425470324e-06,
      "loss": 0.7212,
      "step": 166
    },
    {
      "epoch": 0.7579425113464447,
      "grad_norm": 6.638424873352051,
      "learning_rate": 4.962883916354882e-06,
      "loss": 0.7523,
      "step": 167
    },
    {
      "epoch": 0.762481089258699,
      "grad_norm": 1.3606716394424438,
      "learning_rate": 4.961770558748793e-06,
      "loss": 0.7651,
      "step": 168
    },
    {
      "epoch": 0.7670196671709532,
      "grad_norm": 1.2726523876190186,
      "learning_rate": 4.960640877111451e-06,
      "loss": 0.7729,
      "step": 169
    },
    {
      "epoch": 0.7715582450832073,
      "grad_norm": 0.9216910600662231,
      "learning_rate": 4.959494878933792e-06,
      "loss": 0.7162,
      "step": 170
    },
    {
      "epoch": 0.7760968229954615,
      "grad_norm": 2.3702430725097656,
      "learning_rate": 4.958332571814941e-06,
      "loss": 0.7038,
      "step": 171
    },
    {
      "epoch": 0.7806354009077155,
      "grad_norm": 4.6101555824279785,
      "learning_rate": 4.957153963462172e-06,
      "loss": 0.7141,
      "step": 172
    },
    {
      "epoch": 0.7851739788199698,
      "grad_norm": 1.7314170598983765,
      "learning_rate": 4.955959061690853e-06,
      "loss": 0.7374,
      "step": 173
    },
    {
      "epoch": 0.789712556732224,
      "grad_norm": 2.91019868850708,
      "learning_rate": 4.9547478744243914e-06,
      "loss": 0.7048,
      "step": 174
    },
    {
      "epoch": 0.794251134644478,
      "grad_norm": 2.872775077819824,
      "learning_rate": 4.953520409694186e-06,
      "loss": 0.6912,
      "step": 175
    },
    {
      "epoch": 0.7987897125567323,
      "grad_norm": 2.2696948051452637,
      "learning_rate": 4.952276675639569e-06,
      "loss": 0.7432,
      "step": 176
    },
    {
      "epoch": 0.8033282904689864,
      "grad_norm": 1.8855810165405273,
      "learning_rate": 4.951016680507757e-06,
      "loss": 0.7056,
      "step": 177
    },
    {
      "epoch": 0.8078668683812406,
      "grad_norm": 5.202772617340088,
      "learning_rate": 4.9497404326537954e-06,
      "loss": 0.7114,
      "step": 178
    },
    {
      "epoch": 0.8124054462934948,
      "grad_norm": 0.7916449904441833,
      "learning_rate": 4.948447940540497e-06,
      "loss": 0.7202,
      "step": 179
    },
    {
      "epoch": 0.8169440242057489,
      "grad_norm": 1.5009609460830688,
      "learning_rate": 4.947139212738395e-06,
      "loss": 0.7245,
      "step": 180
    },
    {
      "epoch": 0.8214826021180031,
      "grad_norm": 1.858067512512207,
      "learning_rate": 4.945814257925679e-06,
      "loss": 0.6962,
      "step": 181
    },
    {
      "epoch": 0.8260211800302572,
      "grad_norm": 0.7835391163825989,
      "learning_rate": 4.94447308488814e-06,
      "loss": 0.6875,
      "step": 182
    },
    {
      "epoch": 0.8305597579425114,
      "grad_norm": 21.755929946899414,
      "learning_rate": 4.943115702519115e-06,
      "loss": 0.7304,
      "step": 183
    },
    {
      "epoch": 0.8350983358547656,
      "grad_norm": 6.944667816162109,
      "learning_rate": 4.941742119819421e-06,
      "loss": 0.7381,
      "step": 184
    },
    {
      "epoch": 0.8396369137670197,
      "grad_norm": 0.9813210368156433,
      "learning_rate": 4.940352345897304e-06,
      "loss": 0.682,
      "step": 185
    },
    {
      "epoch": 0.8441754916792739,
      "grad_norm": 1.8010449409484863,
      "learning_rate": 4.938946389968372e-06,
      "loss": 0.7639,
      "step": 186
    },
    {
      "epoch": 0.848714069591528,
      "grad_norm": 2.293980121612549,
      "learning_rate": 4.937524261355535e-06,
      "loss": 0.7027,
      "step": 187
    },
    {
      "epoch": 0.8532526475037822,
      "grad_norm": 1.9937771558761597,
      "learning_rate": 4.9360859694889475e-06,
      "loss": 0.688,
      "step": 188
    },
    {
      "epoch": 0.8577912254160364,
      "grad_norm": 1.8442484140396118,
      "learning_rate": 4.934631523905938e-06,
      "loss": 0.6261,
      "step": 189
    },
    {
      "epoch": 0.8623298033282905,
      "grad_norm": 9.626107215881348,
      "learning_rate": 4.933160934250957e-06,
      "loss": 0.7605,
      "step": 190
    },
    {
      "epoch": 0.8668683812405447,
      "grad_norm": 14.051777839660645,
      "learning_rate": 4.931674210275499e-06,
      "loss": 0.7183,
      "step": 191
    },
    {
      "epoch": 0.8714069591527988,
      "grad_norm": 2.1191225051879883,
      "learning_rate": 4.930171361838052e-06,
      "loss": 0.697,
      "step": 192
    },
    {
      "epoch": 0.875945537065053,
      "grad_norm": 1.4051584005355835,
      "learning_rate": 4.928652398904022e-06,
      "loss": 0.6985,
      "step": 193
    },
    {
      "epoch": 0.8804841149773072,
      "grad_norm": 0.7633137106895447,
      "learning_rate": 4.92711733154567e-06,
      "loss": 0.7018,
      "step": 194
    },
    {
      "epoch": 0.8850226928895613,
      "grad_norm": 1.5257729291915894,
      "learning_rate": 4.925566169942048e-06,
      "loss": 0.7108,
      "step": 195
    },
    {
      "epoch": 0.8895612708018155,
      "grad_norm": 1.7885994911193848,
      "learning_rate": 4.9239989243789275e-06,
      "loss": 0.7251,
      "step": 196
    },
    {
      "epoch": 0.8940998487140696,
      "grad_norm": 1.8133364915847778,
      "learning_rate": 4.922415605248734e-06,
      "loss": 0.691,
      "step": 197
    },
    {
      "epoch": 0.8986384266263238,
      "grad_norm": 1.3306565284729004,
      "learning_rate": 4.920816223050475e-06,
      "loss": 0.6496,
      "step": 198
    },
    {
      "epoch": 0.903177004538578,
      "grad_norm": 2.5053746700286865,
      "learning_rate": 4.919200788389675e-06,
      "loss": 0.7174,
      "step": 199
    },
    {
      "epoch": 0.9077155824508321,
      "grad_norm": 2.8250479698181152,
      "learning_rate": 4.917569311978301e-06,
      "loss": 0.6992,
      "step": 200
    },
    {
      "epoch": 0.9122541603630863,
      "grad_norm": 0.6993988752365112,
      "learning_rate": 4.915921804634693e-06,
      "loss": 0.6983,
      "step": 201
    },
    {
      "epoch": 0.9167927382753404,
      "grad_norm": 2.591536045074463,
      "learning_rate": 4.914258277283494e-06,
      "loss": 0.6686,
      "step": 202
    },
    {
      "epoch": 0.9213313161875946,
      "grad_norm": 1.7433080673217773,
      "learning_rate": 4.912578740955573e-06,
      "loss": 0.686,
      "step": 203
    },
    {
      "epoch": 0.9258698940998488,
      "grad_norm": 1.5048431158065796,
      "learning_rate": 4.910883206787958e-06,
      "loss": 0.7043,
      "step": 204
    },
    {
      "epoch": 0.9304084720121029,
      "grad_norm": 0.7160290479660034,
      "learning_rate": 4.9091716860237545e-06,
      "loss": 0.6703,
      "step": 205
    },
    {
      "epoch": 0.9349470499243571,
      "grad_norm": 16.788084030151367,
      "learning_rate": 4.907444190012081e-06,
      "loss": 0.7465,
      "step": 206
    },
    {
      "epoch": 0.9394856278366112,
      "grad_norm": 6.705326557159424,
      "learning_rate": 4.905700730207983e-06,
      "loss": 0.6692,
      "step": 207
    },
    {
      "epoch": 0.9440242057488654,
      "grad_norm": 1.1539785861968994,
      "learning_rate": 4.903941318172365e-06,
      "loss": 0.6769,
      "step": 208
    },
    {
      "epoch": 0.9485627836611196,
      "grad_norm": 0.806441068649292,
      "learning_rate": 4.902165965571911e-06,
      "loss": 0.6788,
      "step": 209
    },
    {
      "epoch": 0.9531013615733737,
      "grad_norm": 2.599201202392578,
      "learning_rate": 4.900374684179005e-06,
      "loss": 0.6845,
      "step": 210
    },
    {
      "epoch": 0.9576399394856279,
      "grad_norm": 1.9495007991790771,
      "learning_rate": 4.898567485871656e-06,
      "loss": 0.68,
      "step": 211
    },
    {
      "epoch": 0.962178517397882,
      "grad_norm": 1.3471159934997559,
      "learning_rate": 4.896744382633419e-06,
      "loss": 0.6799,
      "step": 212
    },
    {
      "epoch": 0.9667170953101362,
      "grad_norm": 2.436737537384033,
      "learning_rate": 4.894905386553316e-06,
      "loss": 0.6688,
      "step": 213
    },
    {
      "epoch": 0.9712556732223904,
      "grad_norm": 2.486992597579956,
      "learning_rate": 4.893050509825749e-06,
      "loss": 0.6866,
      "step": 214
    },
    {
      "epoch": 0.9757942511346445,
      "grad_norm": 2.137023687362671,
      "learning_rate": 4.891179764750434e-06,
      "loss": 0.6891,
      "step": 215
    },
    {
      "epoch": 0.9803328290468987,
      "grad_norm": 1.3739172220230103,
      "learning_rate": 4.8892931637323e-06,
      "loss": 0.6219,
      "step": 216
    },
    {
      "epoch": 0.9848714069591528,
      "grad_norm": 1.6083399057388306,
      "learning_rate": 4.887390719281423e-06,
      "loss": 0.6805,
      "step": 217
    },
    {
      "epoch": 0.989409984871407,
      "grad_norm": 0.7767590284347534,
      "learning_rate": 4.885472444012937e-06,
      "loss": 0.6857,
      "step": 218
    },
    {
      "epoch": 0.9939485627836612,
      "grad_norm": 0.8085631728172302,
      "learning_rate": 4.883538350646949e-06,
      "loss": 0.6594,
      "step": 219
    },
    {
      "epoch": 0.9984871406959153,
      "grad_norm": 0.7762560844421387,
      "learning_rate": 4.881588452008457e-06,
      "loss": 0.6972,
      "step": 220
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.7762560844421387,
      "learning_rate": 4.8796227610272615e-06,
      "loss": 0.2152,
      "step": 221
    },
    {
      "epoch": 1.0045385779122542,
      "grad_norm": 0.8768157958984375,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 0.6847,
      "step": 222
    },
    {
      "epoch": 1.0090771558245084,
      "grad_norm": 2.3720204830169678,
      "learning_rate": 4.8756440542794805e-06,
      "loss": 0.6646,
      "step": 223
    },
    {
      "epoch": 1.0136157337367624,
      "grad_norm": 2.1314537525177,
      "learning_rate": 4.873631064895749e-06,
      "loss": 0.676,
      "step": 224
    },
    {
      "epoch": 1.0181543116490166,
      "grad_norm": 1.7491836547851562,
      "learning_rate": 4.871602335934847e-06,
      "loss": 0.6474,
      "step": 225
    },
    {
      "epoch": 1.0226928895612708,
      "grad_norm": 1.7630263566970825,
      "learning_rate": 4.8695578808493034e-06,
      "loss": 0.6541,
      "step": 226
    },
    {
      "epoch": 1.027231467473525,
      "grad_norm": 1.195518970489502,
      "learning_rate": 4.867497713195925e-06,
      "loss": 0.6529,
      "step": 227
    },
    {
      "epoch": 1.0317700453857792,
      "grad_norm": 5.6356377601623535,
      "learning_rate": 4.8654218466357066e-06,
      "loss": 0.6666,
      "step": 228
    },
    {
      "epoch": 1.0363086232980332,
      "grad_norm": 21.327880859375,
      "learning_rate": 4.863330294933748e-06,
      "loss": 0.6721,
      "step": 229
    },
    {
      "epoch": 1.0408472012102874,
      "grad_norm": 14.34103012084961,
      "learning_rate": 4.8612230719591535e-06,
      "loss": 0.6374,
      "step": 230
    },
    {
      "epoch": 1.0453857791225416,
      "grad_norm": 7.112085819244385,
      "learning_rate": 4.859100191684946e-06,
      "loss": 0.6729,
      "step": 231
    },
    {
      "epoch": 1.0499243570347958,
      "grad_norm": 2.3837637901306152,
      "learning_rate": 4.856961668187968e-06,
      "loss": 0.6741,
      "step": 232
    },
    {
      "epoch": 1.05446293494705,
      "grad_norm": 1.5125519037246704,
      "learning_rate": 4.854807515648799e-06,
      "loss": 0.6584,
      "step": 233
    },
    {
      "epoch": 1.059001512859304,
      "grad_norm": 1.8248745203018188,
      "learning_rate": 4.852637748351651e-06,
      "loss": 0.6481,
      "step": 234
    },
    {
      "epoch": 1.0635400907715582,
      "grad_norm": 2.260824203491211,
      "learning_rate": 4.850452380684275e-06,
      "loss": 0.6695,
      "step": 235
    },
    {
      "epoch": 1.0680786686838124,
      "grad_norm": 1.764312982559204,
      "learning_rate": 4.848251427137875e-06,
      "loss": 0.6638,
      "step": 236
    },
    {
      "epoch": 1.0726172465960666,
      "grad_norm": 0.9826205968856812,
      "learning_rate": 4.846034902306997e-06,
      "loss": 0.6515,
      "step": 237
    },
    {
      "epoch": 1.0771558245083208,
      "grad_norm": 1.1290264129638672,
      "learning_rate": 4.8438028208894496e-06,
      "loss": 0.6483,
      "step": 238
    },
    {
      "epoch": 1.0816944024205748,
      "grad_norm": 4.97009801864624,
      "learning_rate": 4.841555197686189e-06,
      "loss": 0.6605,
      "step": 239
    },
    {
      "epoch": 1.086232980332829,
      "grad_norm": 6.06207799911499,
      "learning_rate": 4.839292047601234e-06,
      "loss": 0.6147,
      "step": 240
    },
    {
      "epoch": 1.0907715582450832,
      "grad_norm": 1.2093101739883423,
      "learning_rate": 4.837013385641562e-06,
      "loss": 0.6739,
      "step": 241
    },
    {
      "epoch": 1.0953101361573374,
      "grad_norm": 0.9535529017448425,
      "learning_rate": 4.834719226917007e-06,
      "loss": 0.6392,
      "step": 242
    },
    {
      "epoch": 1.0998487140695916,
      "grad_norm": 1.4470645189285278,
      "learning_rate": 4.832409586640164e-06,
      "loss": 0.6357,
      "step": 243
    },
    {
      "epoch": 1.1043872919818456,
      "grad_norm": 1.4899321794509888,
      "learning_rate": 4.830084480126288e-06,
      "loss": 0.6704,
      "step": 244
    },
    {
      "epoch": 1.1089258698940998,
      "grad_norm": 0.7240656614303589,
      "learning_rate": 4.827743922793189e-06,
      "loss": 0.621,
      "step": 245
    },
    {
      "epoch": 1.113464447806354,
      "grad_norm": 0.8096688985824585,
      "learning_rate": 4.8253879301611315e-06,
      "loss": 0.6561,
      "step": 246
    },
    {
      "epoch": 1.1180030257186082,
      "grad_norm": 1.7580249309539795,
      "learning_rate": 4.823016517852731e-06,
      "loss": 0.5893,
      "step": 247
    },
    {
      "epoch": 1.1225416036308624,
      "grad_norm": 2.5382940769195557,
      "learning_rate": 4.820629701592853e-06,
      "loss": 0.6548,
      "step": 248
    },
    {
      "epoch": 1.1270801815431164,
      "grad_norm": 1.0767178535461426,
      "learning_rate": 4.8182274972085065e-06,
      "loss": 0.6801,
      "step": 249
    },
    {
      "epoch": 1.1316187594553706,
      "grad_norm": 0.7919514179229736,
      "learning_rate": 4.815809920628738e-06,
      "loss": 0.6314,
      "step": 250
    },
    {
      "epoch": 1.1361573373676248,
      "grad_norm": 1.305253267288208,
      "learning_rate": 4.813376987884527e-06,
      "loss": 0.6347,
      "step": 251
    },
    {
      "epoch": 1.140695915279879,
      "grad_norm": 1.4656856060028076,
      "learning_rate": 4.810928715108683e-06,
      "loss": 0.6253,
      "step": 252
    },
    {
      "epoch": 1.1452344931921332,
      "grad_norm": 1.2813221216201782,
      "learning_rate": 4.808465118535732e-06,
      "loss": 0.6751,
      "step": 253
    },
    {
      "epoch": 1.1497730711043872,
      "grad_norm": 3.507342576980591,
      "learning_rate": 4.805986214501813e-06,
      "loss": 0.6606,
      "step": 254
    },
    {
      "epoch": 1.1543116490166414,
      "grad_norm": 4.23391056060791,
      "learning_rate": 4.803492019444571e-06,
      "loss": 0.6278,
      "step": 255
    },
    {
      "epoch": 1.1588502269288956,
      "grad_norm": 2.3074967861175537,
      "learning_rate": 4.8009825499030426e-06,
      "loss": 0.6175,
      "step": 256
    },
    {
      "epoch": 1.1633888048411498,
      "grad_norm": 1.3244863748550415,
      "learning_rate": 4.798457822517554e-06,
      "loss": 0.6392,
      "step": 257
    },
    {
      "epoch": 1.167927382753404,
      "grad_norm": 1.0530226230621338,
      "learning_rate": 4.795917854029601e-06,
      "loss": 0.6305,
      "step": 258
    },
    {
      "epoch": 1.172465960665658,
      "grad_norm": 2.187415599822998,
      "learning_rate": 4.79336266128175e-06,
      "loss": 0.6432,
      "step": 259
    },
    {
      "epoch": 1.1770045385779122,
      "grad_norm": 1.4672960042953491,
      "learning_rate": 4.790792261217513e-06,
      "loss": 0.649,
      "step": 260
    },
    {
      "epoch": 1.1815431164901664,
      "grad_norm": 1.1620965003967285,
      "learning_rate": 4.788206670881245e-06,
      "loss": 0.6507,
      "step": 261
    },
    {
      "epoch": 1.1860816944024206,
      "grad_norm": 0.7209274768829346,
      "learning_rate": 4.785605907418029e-06,
      "loss": 0.6502,
      "step": 262
    },
    {
      "epoch": 1.1906202723146748,
      "grad_norm": 3.6349446773529053,
      "learning_rate": 4.78298998807356e-06,
      "loss": 0.6251,
      "step": 263
    },
    {
      "epoch": 1.1951588502269288,
      "grad_norm": 13.263801574707031,
      "learning_rate": 4.7803589301940306e-06,
      "loss": 0.6663,
      "step": 264
    },
    {
      "epoch": 1.199697428139183,
      "grad_norm": 12.560731887817383,
      "learning_rate": 4.777712751226019e-06,
      "loss": 0.6709,
      "step": 265
    },
    {
      "epoch": 1.2042360060514372,
      "grad_norm": 3.721285820007324,
      "learning_rate": 4.775051468716371e-06,
      "loss": 0.6555,
      "step": 266
    },
    {
      "epoch": 1.2087745839636914,
      "grad_norm": 0.974590539932251,
      "learning_rate": 4.772375100312084e-06,
      "loss": 0.6308,
      "step": 267
    },
    {
      "epoch": 1.2133131618759456,
      "grad_norm": 1.3410248756408691,
      "learning_rate": 4.769683663760191e-06,
      "loss": 0.6506,
      "step": 268
    },
    {
      "epoch": 1.2178517397881996,
      "grad_norm": 1.4115511178970337,
      "learning_rate": 4.7669771769076395e-06,
      "loss": 0.6296,
      "step": 269
    },
    {
      "epoch": 1.2223903177004538,
      "grad_norm": 1.355098843574524,
      "learning_rate": 4.764255657701179e-06,
      "loss": 0.6756,
      "step": 270
    },
    {
      "epoch": 1.226928895612708,
      "grad_norm": 0.9310200810432434,
      "learning_rate": 4.761519124187237e-06,
      "loss": 0.6724,
      "step": 271
    },
    {
      "epoch": 1.2314674735249622,
      "grad_norm": 1.0098122358322144,
      "learning_rate": 4.758767594511801e-06,
      "loss": 0.6595,
      "step": 272
    },
    {
      "epoch": 1.2360060514372164,
      "grad_norm": 2.7444238662719727,
      "learning_rate": 4.7560010869202985e-06,
      "loss": 0.582,
      "step": 273
    },
    {
      "epoch": 1.2405446293494704,
      "grad_norm": 2.2694830894470215,
      "learning_rate": 4.753219619757477e-06,
      "loss": 0.6411,
      "step": 274
    },
    {
      "epoch": 1.2450832072617246,
      "grad_norm": 1.1762354373931885,
      "learning_rate": 4.750423211467278e-06,
      "loss": 0.6358,
      "step": 275
    },
    {
      "epoch": 1.2496217851739788,
      "grad_norm": 0.86478191614151,
      "learning_rate": 4.7476118805927214e-06,
      "loss": 0.6234,
      "step": 276
    },
    {
      "epoch": 1.254160363086233,
      "grad_norm": 1.143272876739502,
      "learning_rate": 4.7447856457757765e-06,
      "loss": 0.6627,
      "step": 277
    },
    {
      "epoch": 1.258698940998487,
      "grad_norm": 1.7226762771606445,
      "learning_rate": 4.7419445257572414e-06,
      "loss": 0.6248,
      "step": 278
    },
    {
      "epoch": 1.2632375189107412,
      "grad_norm": 1.428463101387024,
      "learning_rate": 4.739088539376618e-06,
      "loss": 0.6577,
      "step": 279
    },
    {
      "epoch": 1.2677760968229954,
      "grad_norm": 0.9464501142501831,
      "learning_rate": 4.736217705571989e-06,
      "loss": 0.6464,
      "step": 280
    },
    {
      "epoch": 1.2723146747352496,
      "grad_norm": 0.8889546394348145,
      "learning_rate": 4.733332043379889e-06,
      "loss": 0.6249,
      "step": 281
    },
    {
      "epoch": 1.2768532526475038,
      "grad_norm": 0.7456269860267639,
      "learning_rate": 4.730431571935178e-06,
      "loss": 0.6242,
      "step": 282
    },
    {
      "epoch": 1.281391830559758,
      "grad_norm": 9.802299499511719,
      "learning_rate": 4.72751631047092e-06,
      "loss": 0.6576,
      "step": 283
    },
    {
      "epoch": 1.2859304084720122,
      "grad_norm": 15.863835334777832,
      "learning_rate": 4.72458627831825e-06,
      "loss": 0.6916,
      "step": 284
    },
    {
      "epoch": 1.2904689863842662,
      "grad_norm": 15.025418281555176,
      "learning_rate": 4.721641494906247e-06,
      "loss": 0.7036,
      "step": 285
    },
    {
      "epoch": 1.2950075642965204,
      "grad_norm": 3.8970537185668945,
      "learning_rate": 4.718681979761806e-06,
      "loss": 0.6166,
      "step": 286
    },
    {
      "epoch": 1.2995461422087746,
      "grad_norm": 0.6507979035377502,
      "learning_rate": 4.715707752509512e-06,
      "loss": 0.613,
      "step": 287
    },
    {
      "epoch": 1.3040847201210286,
      "grad_norm": 1.1878042221069336,
      "learning_rate": 4.712718832871499e-06,
      "loss": 0.6474,
      "step": 288
    },
    {
      "epoch": 1.3086232980332828,
      "grad_norm": 1.0940614938735962,
      "learning_rate": 4.709715240667332e-06,
      "loss": 0.6577,
      "step": 289
    },
    {
      "epoch": 1.313161875945537,
      "grad_norm": 0.9987061619758606,
      "learning_rate": 4.706696995813869e-06,
      "loss": 0.6571,
      "step": 290
    },
    {
      "epoch": 1.3177004538577912,
      "grad_norm": 1.5589380264282227,
      "learning_rate": 4.7036641183251285e-06,
      "loss": 0.6495,
      "step": 291
    },
    {
      "epoch": 1.3222390317700454,
      "grad_norm": 1.525474190711975,
      "learning_rate": 4.700616628312159e-06,
      "loss": 0.5986,
      "step": 292
    },
    {
      "epoch": 1.3267776096822996,
      "grad_norm": 0.8548336625099182,
      "learning_rate": 4.697554545982904e-06,
      "loss": 0.6034,
      "step": 293
    },
    {
      "epoch": 1.3313161875945538,
      "grad_norm": 4.231250286102295,
      "learning_rate": 4.6944778916420705e-06,
      "loss": 0.6405,
      "step": 294
    },
    {
      "epoch": 1.3358547655068078,
      "grad_norm": 8.273162841796875,
      "learning_rate": 4.691386685690993e-06,
      "loss": 0.6635,
      "step": 295
    },
    {
      "epoch": 1.340393343419062,
      "grad_norm": 4.974193096160889,
      "learning_rate": 4.6882809486274934e-06,
      "loss": 0.6289,
      "step": 296
    },
    {
      "epoch": 1.3449319213313162,
      "grad_norm": 3.757338523864746,
      "learning_rate": 4.685160701045757e-06,
      "loss": 0.6227,
      "step": 297
    },
    {
      "epoch": 1.3494704992435702,
      "grad_norm": 1.2015799283981323,
      "learning_rate": 4.68202596363618e-06,
      "loss": 0.6237,
      "step": 298
    },
    {
      "epoch": 1.3540090771558244,
      "grad_norm": 0.7638722658157349,
      "learning_rate": 4.678876757185248e-06,
      "loss": 0.6063,
      "step": 299
    },
    {
      "epoch": 1.3585476550680786,
      "grad_norm": 1.2864232063293457,
      "learning_rate": 4.675713102575389e-06,
      "loss": 0.5997,
      "step": 300
    },
    {
      "epoch": 1.3630862329803328,
      "grad_norm": 1.1902930736541748,
      "learning_rate": 4.672535020784833e-06,
      "loss": 0.6352,
      "step": 301
    },
    {
      "epoch": 1.367624810892587,
      "grad_norm": 1.4321516752243042,
      "learning_rate": 4.669342532887482e-06,
      "loss": 0.6531,
      "step": 302
    },
    {
      "epoch": 1.3721633888048412,
      "grad_norm": 1.139543890953064,
      "learning_rate": 4.666135660052764e-06,
      "loss": 0.6235,
      "step": 303
    },
    {
      "epoch": 1.3767019667170954,
      "grad_norm": 0.7234447598457336,
      "learning_rate": 4.66291442354549e-06,
      "loss": 0.6012,
      "step": 304
    },
    {
      "epoch": 1.3812405446293494,
      "grad_norm": 3.093146324157715,
      "learning_rate": 4.659678844725722e-06,
      "loss": 0.6058,
      "step": 305
    },
    {
      "epoch": 1.3857791225416036,
      "grad_norm": 2.840275764465332,
      "learning_rate": 4.656428945048622e-06,
      "loss": 0.6139,
      "step": 306
    },
    {
      "epoch": 1.3903177004538578,
      "grad_norm": 1.0061054229736328,
      "learning_rate": 4.653164746064315e-06,
      "loss": 0.6288,
      "step": 307
    },
    {
      "epoch": 1.394856278366112,
      "grad_norm": 0.9403374195098877,
      "learning_rate": 4.649886269417746e-06,
      "loss": 0.6435,
      "step": 308
    },
    {
      "epoch": 1.399394856278366,
      "grad_norm": 1.0838265419006348,
      "learning_rate": 4.646593536848535e-06,
      "loss": 0.6485,
      "step": 309
    },
    {
      "epoch": 1.4039334341906202,
      "grad_norm": 1.2738953828811646,
      "learning_rate": 4.643286570190832e-06,
      "loss": 0.5993,
      "step": 310
    },
    {
      "epoch": 1.4084720121028744,
      "grad_norm": 1.3124756813049316,
      "learning_rate": 4.639965391373173e-06,
      "loss": 0.6154,
      "step": 311
    },
    {
      "epoch": 1.4130105900151286,
      "grad_norm": 0.7026720643043518,
      "learning_rate": 4.636630022418337e-06,
      "loss": 0.6493,
      "step": 312
    },
    {
      "epoch": 1.4175491679273828,
      "grad_norm": 1.101508617401123,
      "learning_rate": 4.6332804854431986e-06,
      "loss": 0.6437,
      "step": 313
    },
    {
      "epoch": 1.422087745839637,
      "grad_norm": 0.6824156641960144,
      "learning_rate": 4.6299168026585775e-06,
      "loss": 0.6017,
      "step": 314
    },
    {
      "epoch": 1.426626323751891,
      "grad_norm": 0.8083431124687195,
      "learning_rate": 4.626538996369096e-06,
      "loss": 0.6338,
      "step": 315
    },
    {
      "epoch": 1.4311649016641452,
      "grad_norm": 0.9624136090278625,
      "learning_rate": 4.623147088973031e-06,
      "loss": 0.5804,
      "step": 316
    },
    {
      "epoch": 1.4357034795763994,
      "grad_norm": 0.8000622987747192,
      "learning_rate": 4.619741102962161e-06,
      "loss": 0.6242,
      "step": 317
    },
    {
      "epoch": 1.4402420574886536,
      "grad_norm": 1.2038214206695557,
      "learning_rate": 4.6163210609216234e-06,
      "loss": 0.6259,
      "step": 318
    },
    {
      "epoch": 1.4447806354009076,
      "grad_norm": 0.8374214768409729,
      "learning_rate": 4.612886985529759e-06,
      "loss": 0.6078,
      "step": 319
    },
    {
      "epoch": 1.4493192133131618,
      "grad_norm": 1.0167770385742188,
      "learning_rate": 4.609438899557964e-06,
      "loss": 0.5972,
      "step": 320
    },
    {
      "epoch": 1.453857791225416,
      "grad_norm": 0.8266498446464539,
      "learning_rate": 4.60597682587054e-06,
      "loss": 0.6211,
      "step": 321
    },
    {
      "epoch": 1.4583963691376702,
      "grad_norm": 0.7585692405700684,
      "learning_rate": 4.6025007874245405e-06,
      "loss": 0.6233,
      "step": 322
    },
    {
      "epoch": 1.4629349470499244,
      "grad_norm": 1.6358634233474731,
      "learning_rate": 4.59901080726962e-06,
      "loss": 0.6075,
      "step": 323
    },
    {
      "epoch": 1.4674735249621786,
      "grad_norm": 1.1722335815429688,
      "learning_rate": 4.595506908547881e-06,
      "loss": 0.6066,
      "step": 324
    },
    {
      "epoch": 1.4720121028744326,
      "grad_norm": 0.9726622104644775,
      "learning_rate": 4.591989114493718e-06,
      "loss": 0.6506,
      "step": 325
    },
    {
      "epoch": 1.4765506807866868,
      "grad_norm": 0.8073020577430725,
      "learning_rate": 4.588457448433667e-06,
      "loss": 0.6077,
      "step": 326
    },
    {
      "epoch": 1.481089258698941,
      "grad_norm": 0.71394282579422,
      "learning_rate": 4.584911933786252e-06,
      "loss": 0.5882,
      "step": 327
    },
    {
      "epoch": 1.4856278366111952,
      "grad_norm": 4.143211364746094,
      "learning_rate": 4.581352594061824e-06,
      "loss": 0.6047,
      "step": 328
    },
    {
      "epoch": 1.4901664145234492,
      "grad_norm": 3.5801639556884766,
      "learning_rate": 4.5777794528624075e-06,
      "loss": 0.6094,
      "step": 329
    },
    {
      "epoch": 1.4947049924357034,
      "grad_norm": 0.9617034792900085,
      "learning_rate": 4.574192533881547e-06,
      "loss": 0.6291,
      "step": 330
    },
    {
      "epoch": 1.4992435703479576,
      "grad_norm": 0.8535535931587219,
      "learning_rate": 4.570591860904149e-06,
      "loss": 0.6587,
      "step": 331
    },
    {
      "epoch": 1.5037821482602118,
      "grad_norm": 1.426477074623108,
      "learning_rate": 4.566977457806317e-06,
      "loss": 0.6347,
      "step": 332
    },
    {
      "epoch": 1.508320726172466,
      "grad_norm": 1.6053332090377808,
      "learning_rate": 4.563349348555207e-06,
      "loss": 0.603,
      "step": 333
    },
    {
      "epoch": 1.5128593040847202,
      "grad_norm": 1.3673542737960815,
      "learning_rate": 4.5597075572088545e-06,
      "loss": 0.6443,
      "step": 334
    },
    {
      "epoch": 1.5173978819969742,
      "grad_norm": 1.0444583892822266,
      "learning_rate": 4.556052107916023e-06,
      "loss": 0.6033,
      "step": 335
    },
    {
      "epoch": 1.5219364599092284,
      "grad_norm": 2.568854331970215,
      "learning_rate": 4.552383024916044e-06,
      "loss": 0.6364,
      "step": 336
    },
    {
      "epoch": 1.5264750378214826,
      "grad_norm": 0.8063260316848755,
      "learning_rate": 4.54870033253865e-06,
      "loss": 0.6406,
      "step": 337
    },
    {
      "epoch": 1.5310136157337366,
      "grad_norm": 0.8449574112892151,
      "learning_rate": 4.545004055203823e-06,
      "loss": 0.5977,
      "step": 338
    },
    {
      "epoch": 1.5355521936459908,
      "grad_norm": 0.7573151588439941,
      "learning_rate": 4.541294217421622e-06,
      "loss": 0.6098,
      "step": 339
    },
    {
      "epoch": 1.540090771558245,
      "grad_norm": 0.7103497982025146,
      "learning_rate": 4.537570843792028e-06,
      "loss": 0.6344,
      "step": 340
    },
    {
      "epoch": 1.5446293494704992,
      "grad_norm": 0.7327162623405457,
      "learning_rate": 4.5338339590047795e-06,
      "loss": 0.6318,
      "step": 341
    },
    {
      "epoch": 1.5491679273827534,
      "grad_norm": 0.7688593864440918,
      "learning_rate": 4.530083587839204e-06,
      "loss": 0.6089,
      "step": 342
    },
    {
      "epoch": 1.5537065052950076,
      "grad_norm": 0.9933049082756042,
      "learning_rate": 4.52631975516406e-06,
      "loss": 0.6003,
      "step": 343
    },
    {
      "epoch": 1.5582450832072618,
      "grad_norm": 0.7319652438163757,
      "learning_rate": 4.522542485937369e-06,
      "loss": 0.6054,
      "step": 344
    },
    {
      "epoch": 1.5627836611195158,
      "grad_norm": 0.8350914120674133,
      "learning_rate": 4.518751805206251e-06,
      "loss": 0.606,
      "step": 345
    },
    {
      "epoch": 1.56732223903177,
      "grad_norm": 1.7987092733383179,
      "learning_rate": 4.514947738106755e-06,
      "loss": 0.6637,
      "step": 346
    },
    {
      "epoch": 1.5718608169440242,
      "grad_norm": 0.6338518261909485,
      "learning_rate": 4.5111303098637005e-06,
      "loss": 0.5778,
      "step": 347
    },
    {
      "epoch": 1.5763993948562782,
      "grad_norm": 0.854932427406311,
      "learning_rate": 4.5072995457905e-06,
      "loss": 0.598,
      "step": 348
    },
    {
      "epoch": 1.5809379727685324,
      "grad_norm": 1.217940330505371,
      "learning_rate": 4.503455471288998e-06,
      "loss": 0.6087,
      "step": 349
    },
    {
      "epoch": 1.5854765506807866,
      "grad_norm": 1.329987645149231,
      "learning_rate": 4.499598111849299e-06,
      "loss": 0.6321,
      "step": 350
    },
    {
      "epoch": 1.5900151285930408,
      "grad_norm": 1.9689991474151611,
      "learning_rate": 4.495727493049604e-06,
      "loss": 0.6361,
      "step": 351
    },
    {
      "epoch": 1.594553706505295,
      "grad_norm": 1.3316866159439087,
      "learning_rate": 4.491843640556033e-06,
      "loss": 0.6097,
      "step": 352
    },
    {
      "epoch": 1.5990922844175492,
      "grad_norm": 1.2030465602874756,
      "learning_rate": 4.4879465801224605e-06,
      "loss": 0.6302,
      "step": 353
    },
    {
      "epoch": 1.6036308623298035,
      "grad_norm": 0.9129522442817688,
      "learning_rate": 4.484036337590343e-06,
      "loss": 0.6398,
      "step": 354
    },
    {
      "epoch": 1.6081694402420574,
      "grad_norm": 1.6810179948806763,
      "learning_rate": 4.4801129388885475e-06,
      "loss": 0.6234,
      "step": 355
    },
    {
      "epoch": 1.6127080181543116,
      "grad_norm": 3.6033570766448975,
      "learning_rate": 4.476176410033179e-06,
      "loss": 0.6145,
      "step": 356
    },
    {
      "epoch": 1.6172465960665658,
      "grad_norm": 1.2315465211868286,
      "learning_rate": 4.472226777127412e-06,
      "loss": 0.6274,
      "step": 357
    },
    {
      "epoch": 1.6217851739788198,
      "grad_norm": 0.7585744261741638,
      "learning_rate": 4.468264066361308e-06,
      "loss": 0.5897,
      "step": 358
    },
    {
      "epoch": 1.626323751891074,
      "grad_norm": 0.945957362651825,
      "learning_rate": 4.464288304011652e-06,
      "loss": 0.6078,
      "step": 359
    },
    {
      "epoch": 1.6308623298033282,
      "grad_norm": 1.0154330730438232,
      "learning_rate": 4.460299516441777e-06,
      "loss": 0.5899,
      "step": 360
    },
    {
      "epoch": 1.6354009077155824,
      "grad_norm": 0.8923754096031189,
      "learning_rate": 4.456297730101379e-06,
      "loss": 0.6204,
      "step": 361
    },
    {
      "epoch": 1.6399394856278366,
      "grad_norm": 0.9550593495368958,
      "learning_rate": 4.452282971526355e-06,
      "loss": 0.5699,
      "step": 362
    },
    {
      "epoch": 1.6444780635400909,
      "grad_norm": 0.7987310886383057,
      "learning_rate": 4.448255267338619e-06,
      "loss": 0.6325,
      "step": 363
    },
    {
      "epoch": 1.649016641452345,
      "grad_norm": 0.830464780330658,
      "learning_rate": 4.444214644245928e-06,
      "loss": 0.6367,
      "step": 364
    },
    {
      "epoch": 1.653555219364599,
      "grad_norm": 1.604446530342102,
      "learning_rate": 4.440161129041704e-06,
      "loss": 0.5668,
      "step": 365
    },
    {
      "epoch": 1.6580937972768532,
      "grad_norm": 0.6995673179626465,
      "learning_rate": 4.436094748604856e-06,
      "loss": 0.596,
      "step": 366
    },
    {
      "epoch": 1.6626323751891074,
      "grad_norm": 0.6944538950920105,
      "learning_rate": 4.432015529899604e-06,
      "loss": 0.6082,
      "step": 367
    },
    {
      "epoch": 1.6671709531013614,
      "grad_norm": 0.9797276854515076,
      "learning_rate": 4.427923499975298e-06,
      "loss": 0.6104,
      "step": 368
    },
    {
      "epoch": 1.6717095310136156,
      "grad_norm": 0.7624075412750244,
      "learning_rate": 4.423818685966239e-06,
      "loss": 0.5721,
      "step": 369
    },
    {
      "epoch": 1.6762481089258698,
      "grad_norm": 0.8912142515182495,
      "learning_rate": 4.4197011150915e-06,
      "loss": 0.6162,
      "step": 370
    },
    {
      "epoch": 1.680786686838124,
      "grad_norm": 1.4948642253875732,
      "learning_rate": 4.415570814654746e-06,
      "loss": 0.6355,
      "step": 371
    },
    {
      "epoch": 1.6853252647503782,
      "grad_norm": 3.306320905685425,
      "learning_rate": 4.4114278120440494e-06,
      "loss": 0.6077,
      "step": 372
    },
    {
      "epoch": 1.6898638426626325,
      "grad_norm": 0.6849818229675293,
      "learning_rate": 4.407272134731711e-06,
      "loss": 0.621,
      "step": 373
    },
    {
      "epoch": 1.6944024205748867,
      "grad_norm": 0.9957187175750732,
      "learning_rate": 4.403103810274082e-06,
      "loss": 0.6468,
      "step": 374
    },
{ |
|
"epoch": 1.6989409984871406, |
|
"grad_norm": 0.926688551902771, |
|
"learning_rate": 4.398922866311371e-06, |
|
"loss": 0.6021, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.7034795763993948, |
|
"grad_norm": 0.8220088481903076, |
|
"learning_rate": 4.394729330567471e-06, |
|
"loss": 0.5753, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.708018154311649, |
|
"grad_norm": 0.8064286112785339, |
|
"learning_rate": 4.390523230849769e-06, |
|
"loss": 0.6275, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.712556732223903, |
|
"grad_norm": 0.7482770681381226, |
|
"learning_rate": 4.386304595048966e-06, |
|
"loss": 0.6103, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.7170953101361572, |
|
"grad_norm": 1.6559797525405884, |
|
"learning_rate": 4.382073451138887e-06, |
|
"loss": 0.6366, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.7216338880484114, |
|
"grad_norm": 0.6992952227592468, |
|
"learning_rate": 4.3778298271762995e-06, |
|
"loss": 0.6188, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.7261724659606656, |
|
"grad_norm": 0.6812805533409119, |
|
"learning_rate": 4.373573751300729e-06, |
|
"loss": 0.6103, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.7307110438729199, |
|
"grad_norm": 0.767241358757019, |
|
"learning_rate": 4.369305251734267e-06, |
|
"loss": 0.6089, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.735249621785174, |
|
"grad_norm": 1.5500905513763428, |
|
"learning_rate": 4.365024356781386e-06, |
|
"loss": 0.6087, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.7397881996974283, |
|
"grad_norm": 0.8380416631698608, |
|
"learning_rate": 4.360731094828755e-06, |
|
"loss": 0.6074, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.7443267776096822, |
|
"grad_norm": 0.915096640586853, |
|
"learning_rate": 4.356425494345047e-06, |
|
"loss": 0.5962, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.7488653555219364, |
|
"grad_norm": 0.9544028639793396, |
|
"learning_rate": 4.352107583880753e-06, |
|
"loss": 0.5766, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.7534039334341907, |
|
"grad_norm": 0.7770220041275024, |
|
"learning_rate": 4.347777392067991e-06, |
|
"loss": 0.5879, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.7579425113464446, |
|
"grad_norm": 3.470493793487549, |
|
"learning_rate": 4.343434947620316e-06, |
|
"loss": 0.6107, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.7624810892586988, |
|
"grad_norm": 1.7937536239624023, |
|
"learning_rate": 4.339080279332531e-06, |
|
"loss": 0.5892, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.767019667170953, |
|
"grad_norm": 1.262220859527588, |
|
"learning_rate": 4.334713416080498e-06, |
|
"loss": 0.6321, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.7715582450832073, |
|
"grad_norm": 0.6722662448883057, |
|
"learning_rate": 4.33033438682094e-06, |
|
"loss": 0.6366, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.7760968229954615, |
|
"grad_norm": 1.0483866930007935, |
|
"learning_rate": 4.3259432205912544e-06, |
|
"loss": 0.5867, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.7806354009077157, |
|
"grad_norm": 1.2742741107940674, |
|
"learning_rate": 4.32153994650932e-06, |
|
"loss": 0.6045, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.7851739788199699, |
|
"grad_norm": 1.2766985893249512, |
|
"learning_rate": 4.317124593773301e-06, |
|
"loss": 0.5952, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.789712556732224, |
|
"grad_norm": 0.8167585134506226, |
|
"learning_rate": 4.312697191661457e-06, |
|
"loss": 0.5621, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.794251134644478, |
|
"grad_norm": 0.7818560004234314, |
|
"learning_rate": 4.308257769531947e-06, |
|
"loss": 0.5897, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.7987897125567323, |
|
"grad_norm": 1.0983150005340576, |
|
"learning_rate": 4.303806356822635e-06, |
|
"loss": 0.6189, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.8033282904689862, |
|
"grad_norm": 3.0957119464874268, |
|
"learning_rate": 4.299342983050892e-06, |
|
"loss": 0.5743, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.8078668683812404, |
|
"grad_norm": 1.364321231842041, |
|
"learning_rate": 4.294867677813407e-06, |
|
"loss": 0.5722, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.8124054462934946, |
|
"grad_norm": 1.0932508707046509, |
|
"learning_rate": 4.290380470785984e-06, |
|
"loss": 0.6074, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.8169440242057489, |
|
"grad_norm": 0.8298100829124451, |
|
"learning_rate": 4.285881391723348e-06, |
|
"loss": 0.6143, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.821482602118003, |
|
"grad_norm": 0.7821558117866516, |
|
"learning_rate": 4.2813704704589504e-06, |
|
"loss": 0.6148, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.8260211800302573, |
|
"grad_norm": 1.2309906482696533, |
|
"learning_rate": 4.276847736904765e-06, |
|
"loss": 0.6039, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.8305597579425115, |
|
"grad_norm": 0.7675696015357971, |
|
"learning_rate": 4.272313221051094e-06, |
|
"loss": 0.5869, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.8350983358547657, |
|
"grad_norm": 0.6467660069465637, |
|
"learning_rate": 4.267766952966369e-06, |
|
"loss": 0.5999, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.8396369137670197, |
|
"grad_norm": 2.9403133392333984, |
|
"learning_rate": 4.263208962796951e-06, |
|
"loss": 0.5859, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.8441754916792739, |
|
"grad_norm": 1.017529845237732, |
|
"learning_rate": 4.2586392807669286e-06, |
|
"loss": 0.5771, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.8487140695915278, |
|
"grad_norm": 0.7939811944961548, |
|
"learning_rate": 4.25405793717792e-06, |
|
"loss": 0.5968, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.853252647503782, |
|
"grad_norm": 0.9015148878097534, |
|
"learning_rate": 4.2494649624088724e-06, |
|
"loss": 0.5791, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.8577912254160363, |
|
"grad_norm": 1.0004379749298096, |
|
"learning_rate": 4.2448603869158585e-06, |
|
"loss": 0.5969, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.8623298033282905, |
|
"grad_norm": 0.8573418855667114, |
|
"learning_rate": 4.2402442412318765e-06, |
|
"loss": 0.6308, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.8668683812405447, |
|
"grad_norm": 0.7616469860076904, |
|
"learning_rate": 4.235616555966646e-06, |
|
"loss": 0.5955, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.8714069591527989, |
|
"grad_norm": 0.838377833366394, |
|
"learning_rate": 4.2309773618064035e-06, |
|
"loss": 0.6135, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.875945537065053, |
|
"grad_norm": 0.6853629350662231, |
|
"learning_rate": 4.226326689513705e-06, |
|
"loss": 0.5962, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.8804841149773073, |
|
"grad_norm": 3.4511594772338867, |
|
"learning_rate": 4.221664569927217e-06, |
|
"loss": 0.632, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.8850226928895613, |
|
"grad_norm": 2.316239833831787, |
|
"learning_rate": 4.216991033961511e-06, |
|
"loss": 0.5712, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.8895612708018155, |
|
"grad_norm": 0.7679340243339539, |
|
"learning_rate": 4.212306112606863e-06, |
|
"loss": 0.5849, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.8940998487140694, |
|
"grad_norm": 0.8144194483757019, |
|
"learning_rate": 4.207609836929045e-06, |
|
"loss": 0.5586, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.8986384266263236, |
|
"grad_norm": 0.7225912809371948, |
|
"learning_rate": 4.2029022380691195e-06, |
|
"loss": 0.606, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.9031770045385779, |
|
"grad_norm": 0.800234854221344, |
|
"learning_rate": 4.198183347243233e-06, |
|
"loss": 0.6024, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.907715582450832, |
|
"grad_norm": 0.7729604840278625, |
|
"learning_rate": 4.1934531957424095e-06, |
|
"loss": 0.598, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.9122541603630863, |
|
"grad_norm": 0.6805166602134705, |
|
"learning_rate": 4.188711814932343e-06, |
|
"loss": 0.6148, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.9167927382753405, |
|
"grad_norm": 1.0123629570007324, |
|
"learning_rate": 4.1839592362531875e-06, |
|
"loss": 0.6029, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.9213313161875947, |
|
"grad_norm": 0.8777531385421753, |
|
"learning_rate": 4.179195491219353e-06, |
|
"loss": 0.5721, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.9258698940998489, |
|
"grad_norm": 0.8781999945640564, |
|
"learning_rate": 4.1744206114192895e-06, |
|
"loss": 0.5761, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.9304084720121029, |
|
"grad_norm": 0.909726083278656, |
|
"learning_rate": 4.169634628515288e-06, |
|
"loss": 0.6101, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.934947049924357, |
|
"grad_norm": 0.8270307779312134, |
|
"learning_rate": 4.164837574243259e-06, |
|
"loss": 0.5635, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.939485627836611, |
|
"grad_norm": 0.8078930974006653, |
|
"learning_rate": 4.16002948041253e-06, |
|
"loss": 0.6117, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.9440242057488653, |
|
"grad_norm": 1.2975406646728516, |
|
"learning_rate": 4.155210378905629e-06, |
|
"loss": 0.6157, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.9485627836611195, |
|
"grad_norm": 1.0585848093032837, |
|
"learning_rate": 4.15038030167808e-06, |
|
"loss": 0.6252, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.9531013615733737, |
|
"grad_norm": 1.106614112854004, |
|
"learning_rate": 4.145539280758184e-06, |
|
"loss": 0.5781, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.9576399394856279, |
|
"grad_norm": 0.7705745697021484, |
|
"learning_rate": 4.140687348246814e-06, |
|
"loss": 0.6134, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.962178517397882, |
|
"grad_norm": 1.1777352094650269, |
|
"learning_rate": 4.1358245363171905e-06, |
|
"loss": 0.6231, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.9667170953101363, |
|
"grad_norm": 0.9292231798171997, |
|
"learning_rate": 4.130950877214683e-06, |
|
"loss": 0.6086, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.9712556732223905, |
|
"grad_norm": 1.0536510944366455, |
|
"learning_rate": 4.126066403256585e-06, |
|
"loss": 0.6077, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.9757942511346445, |
|
"grad_norm": 0.7694706916809082, |
|
"learning_rate": 4.121171146831905e-06, |
|
"loss": 0.6318, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.9803328290468987, |
|
"grad_norm": 1.4091219902038574, |
|
"learning_rate": 4.116265140401148e-06, |
|
"loss": 0.5873, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.9848714069591527, |
|
"grad_norm": 1.4843878746032715, |
|
"learning_rate": 4.111348416496104e-06, |
|
"loss": 0.5748, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.9894099848714069, |
|
"grad_norm": 2.431475877761841, |
|
"learning_rate": 4.106421007719631e-06, |
|
"loss": 0.6155, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.993948562783661, |
|
"grad_norm": 0.689834475517273, |
|
"learning_rate": 4.101482946745438e-06, |
|
"loss": 0.5792, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.9984871406959153, |
|
"grad_norm": 0.7212426662445068, |
|
"learning_rate": 4.096534266317869e-06, |
|
"loss": 0.6106, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.7212426662445068, |
|
"learning_rate": 4.091574999251685e-06, |
|
"loss": 0.1986, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.004538577912254, |
|
"grad_norm": 0.859722912311554, |
|
"learning_rate": 4.086605178431848e-06, |
|
"loss": 0.5424, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.0090771558245084, |
|
"grad_norm": 0.782247006893158, |
|
"learning_rate": 4.0816248368133015e-06, |
|
"loss": 0.5169, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.0136157337367626, |
|
"grad_norm": 0.7215720415115356, |
|
"learning_rate": 4.076634007420754e-06, |
|
"loss": 0.5712, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.018154311649017, |
|
"grad_norm": 0.7926766872406006, |
|
"learning_rate": 4.0716327233484544e-06, |
|
"loss": 0.5433, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.022692889561271, |
|
"grad_norm": 0.7854951620101929, |
|
"learning_rate": 4.066621017759984e-06, |
|
"loss": 0.57, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.027231467473525, |
|
"grad_norm": 0.7837132215499878, |
|
"learning_rate": 4.0615989238880215e-06, |
|
"loss": 0.5367, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.031770045385779, |
|
"grad_norm": 1.0618802309036255, |
|
"learning_rate": 4.056566475034136e-06, |
|
"loss": 0.5247, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.036308623298033, |
|
"grad_norm": 0.7492559552192688, |
|
"learning_rate": 4.051523704568557e-06, |
|
"loss": 0.5398, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.0408472012102874, |
|
"grad_norm": 1.0183978080749512, |
|
"learning_rate": 4.04647064592996e-06, |
|
"loss": 0.5487, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.0453857791225416, |
|
"grad_norm": 0.8974794745445251, |
|
"learning_rate": 4.041407332625238e-06, |
|
"loss": 0.5551, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.049924357034796, |
|
"grad_norm": 0.6771326661109924, |
|
"learning_rate": 4.0363337982292865e-06, |
|
"loss": 0.5647, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.05446293494705, |
|
"grad_norm": 0.9089450240135193, |
|
"learning_rate": 4.031250076384774e-06, |
|
"loss": 0.5743, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.059001512859304, |
|
"grad_norm": 0.641944944858551, |
|
"learning_rate": 4.026156200801924e-06, |
|
"loss": 0.5344, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.0635400907715584, |
|
"grad_norm": 0.7394944429397583, |
|
"learning_rate": 4.021052205258288e-06, |
|
"loss": 0.5856, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.068078668683812, |
|
"grad_norm": 2.491907835006714, |
|
"learning_rate": 4.015938123598525e-06, |
|
"loss": 0.5597, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.0726172465960664, |
|
"grad_norm": 1.3713676929473877, |
|
"learning_rate": 4.010813989734174e-06, |
|
"loss": 0.5324, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.0771558245083206, |
|
"grad_norm": 0.7260848879814148, |
|
"learning_rate": 4.00567983764343e-06, |
|
"loss": 0.5529, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.081694402420575, |
|
"grad_norm": 1.168824553489685, |
|
"learning_rate": 4.0005357013709215e-06, |
|
"loss": 0.6031, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.086232980332829, |
|
"grad_norm": 0.9732924699783325, |
|
"learning_rate": 3.995381615027477e-06, |
|
"loss": 0.5643, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.090771558245083, |
|
"grad_norm": 1.2031774520874023, |
|
"learning_rate": 3.990217612789909e-06, |
|
"loss": 0.5651, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.0953101361573374, |
|
"grad_norm": 0.7182034850120544, |
|
"learning_rate": 3.985043728900782e-06, |
|
"loss": 0.5507, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.0998487140695916, |
|
"grad_norm": 2.1731138229370117, |
|
"learning_rate": 3.979859997668182e-06, |
|
"loss": 0.5581, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.104387291981846, |
|
"grad_norm": 1.6525670289993286, |
|
"learning_rate": 3.9746664534654975e-06, |
|
"loss": 0.5827, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.1089258698941, |
|
"grad_norm": 0.9597675800323486, |
|
"learning_rate": 3.969463130731183e-06, |
|
"loss": 0.5685, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.1134644478063542, |
|
"grad_norm": 0.7419756650924683, |
|
"learning_rate": 3.964250063968537e-06, |
|
"loss": 0.5759, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.118003025718608, |
|
"grad_norm": 0.76424241065979, |
|
"learning_rate": 3.959027287745471e-06, |
|
"loss": 0.5648, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.122541603630862, |
|
"grad_norm": 0.8645866513252258, |
|
"learning_rate": 3.95379483669428e-06, |
|
"loss": 0.5594, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.1270801815431164, |
|
"grad_norm": 1.0087001323699951, |
|
"learning_rate": 3.9485527455114095e-06, |
|
"loss": 0.5359, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.1316187594553706, |
|
"grad_norm": 1.055308222770691, |
|
"learning_rate": 3.943301048957233e-06, |
|
"loss": 0.5465, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.136157337367625, |
|
"grad_norm": 1.540602445602417, |
|
"learning_rate": 3.9380397818558154e-06, |
|
"loss": 0.5647, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.140695915279879, |
|
"grad_norm": 0.6511226892471313, |
|
"learning_rate": 3.932768979094685e-06, |
|
"loss": 0.5102, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.145234493192133, |
|
"grad_norm": 0.6932368278503418, |
|
"learning_rate": 3.927488675624599e-06, |
|
"loss": 0.5934, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.1497730711043874, |
|
"grad_norm": 0.6622692942619324, |
|
"learning_rate": 3.922198906459318e-06, |
|
"loss": 0.5783, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.1543116490166416, |
|
"grad_norm": 0.9521903991699219, |
|
"learning_rate": 3.916899706675366e-06, |
|
"loss": 0.5642, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.1588502269288954, |
|
"grad_norm": 1.9041461944580078, |
|
"learning_rate": 3.911591111411802e-06, |
|
"loss": 0.5631, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.1633888048411496, |
|
"grad_norm": 0.7302039861679077, |
|
"learning_rate": 3.906273155869988e-06, |
|
"loss": 0.5907, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.167927382753404, |
|
"grad_norm": 0.9459813237190247, |
|
"learning_rate": 3.900945875313353e-06, |
|
"loss": 0.5767, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.172465960665658, |
|
"grad_norm": 0.9118275046348572, |
|
"learning_rate": 3.895609305067162e-06, |
|
"loss": 0.5699, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.177004538577912, |
|
"grad_norm": 0.9915804266929626, |
|
"learning_rate": 3.890263480518278e-06, |
|
"loss": 0.5692, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.1815431164901664, |
|
"grad_norm": 0.7956082820892334, |
|
"learning_rate": 3.884908437114931e-06, |
|
"loss": 0.5809, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.1860816944024206, |
|
"grad_norm": 0.7874560952186584, |
|
"learning_rate": 3.879544210366479e-06, |
|
"loss": 0.5426, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.190620272314675, |
|
"grad_norm": 0.672660231590271, |
|
"learning_rate": 3.8741708358431776e-06, |
|
"loss": 0.5562, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.195158850226929, |
|
"grad_norm": 0.9052623510360718, |
|
"learning_rate": 3.868788349175939e-06, |
|
"loss": 0.5946, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.1996974281391832, |
|
"grad_norm": 2.023263692855835, |
|
"learning_rate": 3.863396786056102e-06, |
|
"loss": 0.5571, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.2042360060514374, |
|
"grad_norm": 0.7047929167747498, |
|
"learning_rate": 3.8579961822351856e-06, |
|
"loss": 0.5659, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.208774583963691, |
|
"grad_norm": 0.6957628130912781, |
|
"learning_rate": 3.852586573524663e-06, |
|
"loss": 0.5478, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.2133131618759454, |
|
"grad_norm": 1.1220340728759766, |
|
"learning_rate": 3.847167995795716e-06, |
|
"loss": 0.5478, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.2178517397881996, |
|
"grad_norm": 1.4658353328704834, |
|
"learning_rate": 3.841740484979002e-06, |
|
"loss": 0.5418, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.222390317700454, |
|
"grad_norm": 0.7847384810447693, |
|
"learning_rate": 3.836304077064412e-06, |
|
"loss": 0.5784, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.226928895612708, |
|
"grad_norm": 1.5285112857818604, |
|
"learning_rate": 3.830858808100835e-06, |
|
"loss": 0.5449, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.231467473524962, |
|
"grad_norm": 0.6902230978012085, |
|
"learning_rate": 3.825404714195917e-06, |
|
"loss": 0.567, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.2360060514372164, |
|
"grad_norm": 0.8884925842285156, |
|
"learning_rate": 3.819941831515825e-06, |
|
"loss": 0.5181, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.2405446293494706, |
|
"grad_norm": 1.5801842212677002, |
|
"learning_rate": 3.8144701962849973e-06, |
|
"loss": 0.5377, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.245083207261725, |
|
"grad_norm": 0.7058039307594299, |
|
"learning_rate": 3.80898984478592e-06, |
|
"loss": 0.5685, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.2496217851739786, |
|
"grad_norm": 0.6729607582092285, |
|
"learning_rate": 3.803500813358869e-06, |
|
"loss": 0.563, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.254160363086233, |
|
"grad_norm": 0.8975954055786133, |
|
"learning_rate": 3.7980031384016826e-06, |
|
"loss": 0.5865, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.258698940998487, |
|
"grad_norm": 1.0153331756591797, |
|
"learning_rate": 3.79249685636951e-06, |
|
"loss": 0.5611, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.263237518910741, |
|
"grad_norm": 0.6788516640663147, |
|
"learning_rate": 3.7869820037745773e-06, |
|
"loss": 0.5417, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.2677760968229954, |
|
"grad_norm": 0.9200128316879272, |
|
"learning_rate": 3.7814586171859397e-06, |
|
"loss": 0.5621, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.2723146747352496, |
|
"grad_norm": 2.8803627490997314, |
|
"learning_rate": 3.775926733229243e-06, |
|
"loss": 0.5593, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.276853252647504, |
|
"grad_norm": 2.3636279106140137, |
|
"learning_rate": 3.770386388586479e-06, |
|
"loss": 0.5893, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.281391830559758, |
|
"grad_norm": 0.8512002825737, |
|
"learning_rate": 3.7648376199957416e-06, |
|
"loss": 0.5411, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.2859304084720122, |
|
"grad_norm": 1.220920205116272, |
|
"learning_rate": 3.7592804642509844e-06, |
|
"loss": 0.5077, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.2904689863842664, |
|
"grad_norm": 1.1704233884811401, |
|
"learning_rate": 3.7537149582017764e-06, |
|
"loss": 0.5412, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.2950075642965206, |
|
"grad_norm": 1.7733300924301147, |
|
"learning_rate": 3.7481411387530577e-06, |
|
"loss": 0.5297, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.2995461422087744, |
|
"grad_norm": 0.6807821393013, |
|
"learning_rate": 3.742559042864895e-06, |
|
"loss": 0.5729, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.3040847201210286, |
|
"grad_norm": 1.166459083557129, |
|
"learning_rate": 3.7369687075522355e-06, |
|
"loss": 0.528, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.308623298033283, |
|
"grad_norm": 0.7100872993469238, |
|
"learning_rate": 3.7313701698846616e-06, |
|
"loss": 0.5686, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.313161875945537, |
|
"grad_norm": 0.9993472099304199, |
|
"learning_rate": 3.725763466986147e-06, |
|
"loss": 0.5676, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.317700453857791, |
|
"grad_norm": 0.889721155166626, |
|
"learning_rate": 3.7201486360348075e-06, |
|
"loss": 0.5261, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.3222390317700454, |
|
"grad_norm": 0.6611590385437012, |
|
"learning_rate": 3.714525714262659e-06, |
|
"loss": 0.5292, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.3267776096822996, |
|
"grad_norm": 0.8129754662513733, |
|
"learning_rate": 3.708894738955364e-06, |
|
"loss": 0.5556, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.331316187594554, |
|
"grad_norm": 0.797924816608429, |
|
"learning_rate": 3.703255747451991e-06, |
|
"loss": 0.5294, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.335854765506808, |
|
"grad_norm": 0.7335793972015381, |
|
"learning_rate": 3.697608777144762e-06, |
|
"loss": 0.5144, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.340393343419062, |
|
"grad_norm": 0.70816570520401, |
|
"learning_rate": 3.691953865478809e-06, |
|
"loss": 0.5649, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.344931921331316, |
|
"grad_norm": 0.916517436504364, |
|
"learning_rate": 3.6862910499519204e-06, |
|
"loss": 0.5721, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.34947049924357, |
|
"grad_norm": 0.9996108412742615, |
|
"learning_rate": 3.680620368114297e-06, |
|
"loss": 0.5427, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.3540090771558244, |
|
"grad_norm": 0.7871035933494568, |
|
"learning_rate": 3.6749418575683005e-06, |
|
"loss": 0.5549, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.3585476550680786, |
|
"grad_norm": 0.7144160270690918, |
|
"learning_rate": 3.6692555559682052e-06, |
|
"loss": 0.5779, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.363086232980333, |
|
"grad_norm": 0.6424504518508911, |
|
"learning_rate": 3.6635615010199484e-06, |
|
"loss": 0.5392, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.367624810892587, |
|
"grad_norm": 1.4431594610214233, |
|
"learning_rate": 3.6578597304808784e-06, |
|
"loss": 0.5398, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.3721633888048412, |
|
"grad_norm": 0.834149181842804, |
|
"learning_rate": 3.6521502821595067e-06, |
|
"loss": 0.5421, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.3767019667170954, |
|
"grad_norm": 0.802004873752594, |
|
"learning_rate": 3.6464331939152576e-06, |
|
"loss": 0.5086, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.3812405446293496, |
|
"grad_norm": 0.73563152551651, |
|
"learning_rate": 3.6407085036582134e-06, |
|
"loss": 0.5406, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.385779122541604, |
|
"grad_norm": 1.410982608795166, |
|
"learning_rate": 3.634976249348867e-06, |
|
"loss": 0.568, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.3903177004538576, |
|
"grad_norm": 0.8028589487075806, |
|
"learning_rate": 3.629236468997868e-06, |
|
"loss": 0.5085, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.394856278366112, |
|
"grad_norm": 0.8530763387680054, |
|
"learning_rate": 3.6234892006657716e-06, |
|
"loss": 0.5324, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.399394856278366, |
|
"grad_norm": 0.8291765451431274, |
|
"learning_rate": 3.6177344824627854e-06, |
|
"loss": 0.5593, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.40393343419062, |
|
"grad_norm": 0.7214178442955017, |
|
"learning_rate": 3.6119723525485173e-06, |
|
"loss": 0.5302, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.4084720121028744, |
|
"grad_norm": 0.9748385548591614, |
|
"learning_rate": 3.606202849131723e-06, |
|
"loss": 0.5362, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.4130105900151286, |
|
"grad_norm": 0.8343231678009033, |
|
"learning_rate": 3.600426010470051e-06, |
|
"loss": 0.5762, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.417549167927383, |
|
"grad_norm": 1.3052794933319092, |
|
"learning_rate": 3.594641874869792e-06, |
|
"loss": 0.5693, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.422087745839637, |
|
"grad_norm": 0.842367947101593, |
|
"learning_rate": 3.5888504806856194e-06, |
|
"loss": 0.5332, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.4266263237518912, |
|
"grad_norm": 0.7482140064239502, |
|
"learning_rate": 3.5830518663203412e-06, |
|
"loss": 0.5556, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.431164901664145, |
|
"grad_norm": 0.7388250827789307, |
|
"learning_rate": 3.5772460702246415e-06, |
|
"loss": 0.5375, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.435703479576399, |
|
"grad_norm": 0.78763747215271, |
|
"learning_rate": 3.5714331308968257e-06, |
|
"loss": 0.5476, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.4402420574886534, |
|
"grad_norm": 0.7993074059486389, |
|
"learning_rate": 3.5656130868825677e-06, |
|
"loss": 0.5168, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.4447806354009076, |
|
"grad_norm": 0.7094533443450928, |
|
"learning_rate": 3.5597859767746524e-06, |
|
"loss": 0.5483, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.449319213313162, |
|
"grad_norm": 0.6727566123008728, |
|
"learning_rate": 3.553951839212718e-06, |
|
"loss": 0.5623, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.453857791225416, |
|
"grad_norm": 0.8603148460388184, |
|
"learning_rate": 3.548110712883005e-06, |
|
"loss": 0.5737, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.4583963691376702, |
|
"grad_norm": 0.9948770999908447, |
|
"learning_rate": 3.5422626365180936e-06, |
|
"loss": 0.5717, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.4629349470499244, |
|
"grad_norm": 0.9731677174568176, |
|
"learning_rate": 3.5364076488966516e-06, |
|
"loss": 0.558, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.4674735249621786, |
|
"grad_norm": 1.0563665628433228, |
|
"learning_rate": 3.5305457888431747e-06, |
|
"loss": 0.5143, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.472012102874433, |
|
"grad_norm": 0.7944256663322449, |
|
"learning_rate": 3.5246770952277302e-06, |
|
"loss": 0.523, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.476550680786687, |
|
"grad_norm": 9.147004127502441, |
|
"learning_rate": 3.5188016069656986e-06, |
|
"loss": 0.5638, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.481089258698941, |
|
"grad_norm": 5.153329372406006, |
|
"learning_rate": 3.512919363017516e-06, |
|
"loss": 0.5689, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.485627836611195, |
|
"grad_norm": 3.699370861053467, |
|
"learning_rate": 3.5070304023884154e-06, |
|
"loss": 0.5571, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.4901664145234492, |
|
"grad_norm": 1.9909933805465698, |
|
"learning_rate": 3.501134764128167e-06, |
|
"loss": 0.5493, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.4947049924357034, |
|
"grad_norm": 0.8108904957771301, |
|
"learning_rate": 3.495232487330822e-06, |
|
"loss": 0.5545, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.4992435703479576, |
|
"grad_norm": 1.2555493116378784, |
|
"learning_rate": 3.489323611134452e-06, |
|
"loss": 0.5634, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.503782148260212, |
|
"grad_norm": 1.1967525482177734, |
|
"learning_rate": 3.4834081747208888e-06, |
|
"loss": 0.5767, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.508320726172466, |
|
"grad_norm": 1.1422444581985474, |
|
"learning_rate": 3.477486217315464e-06, |
|
"loss": 0.5774, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.5128593040847202, |
|
"grad_norm": 0.7501647472381592, |
|
"learning_rate": 3.4715577781867516e-06, |
|
"loss": 0.5301, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.517397881996974, |
|
"grad_norm": 0.6840022802352905, |
|
"learning_rate": 3.465622896646305e-06, |
|
"loss": 0.5548, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.521936459909228, |
|
"grad_norm": 0.7143796682357788, |
|
"learning_rate": 3.4596816120483985e-06, |
|
"loss": 0.4968, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.5264750378214824, |
|
"grad_norm": 0.7957927584648132, |
|
"learning_rate": 3.453733963789764e-06, |
|
"loss": 0.561, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.5310136157337366, |
|
"grad_norm": 2.5668840408325195, |
|
"learning_rate": 3.4477799913093303e-06, |
|
"loss": 0.5766, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.535552193645991, |
|
"grad_norm": 0.8086560368537903, |
|
"learning_rate": 3.441819734087963e-06, |
|
"loss": 0.5794, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.540090771558245, |
|
"grad_norm": 0.7480419874191284, |
|
"learning_rate": 3.4358532316482037e-06, |
|
"loss": 0.5783, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.5446293494704992, |
|
"grad_norm": 0.6926214098930359, |
|
"learning_rate": 3.4298805235540033e-06, |
|
"loss": 0.5488, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.5491679273827534, |
|
"grad_norm": 0.6664040088653564, |
|
"learning_rate": 3.4239016494104636e-06, |
|
"loss": 0.5399, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.5537065052950076, |
|
"grad_norm": 1.0281089544296265, |
|
"learning_rate": 3.417916648863574e-06, |
|
"loss": 0.5698, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.558245083207262, |
|
"grad_norm": 1.0963114500045776, |
|
"learning_rate": 3.411925561599947e-06, |
|
"loss": 0.5392, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.562783661119516, |
|
"grad_norm": 0.7198578715324402, |
|
"learning_rate": 3.405928427346557e-06, |
|
"loss": 0.5643, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.5673222390317703, |
|
"grad_norm": 0.8547159433364868, |
|
"learning_rate": 3.3999252858704775e-06, |
|
"loss": 0.5077, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.5718608169440245, |
|
"grad_norm": 0.7263000011444092, |
|
"learning_rate": 3.3939161769786124e-06, |
|
"loss": 0.496, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.5763993948562782, |
|
"grad_norm": 0.6732318997383118, |
|
"learning_rate": 3.387901140517438e-06, |
|
"loss": 0.5575, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.5809379727685324, |
|
"grad_norm": 1.9751555919647217, |
|
"learning_rate": 3.3818802163727377e-06, |
|
"loss": 0.5257, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.5854765506807866, |
|
"grad_norm": 0.7534604668617249, |
|
"learning_rate": 3.3758534444693323e-06, |
|
"loss": 0.5367, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.590015128593041, |
|
"grad_norm": 0.6703462600708008, |
|
"learning_rate": 3.3698208647708226e-06, |
|
"loss": 0.5362, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.594553706505295, |
|
"grad_norm": 1.8879098892211914, |
|
"learning_rate": 3.36378251727932e-06, |
|
"loss": 0.5571, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.5990922844175492, |
|
"grad_norm": 0.6916099786758423, |
|
"learning_rate": 3.357738442035181e-06, |
|
"loss": 0.5411, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.6036308623298035, |
|
"grad_norm": 0.7827025651931763, |
|
"learning_rate": 3.3516886791167446e-06, |
|
"loss": 0.5316, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.608169440242057, |
|
"grad_norm": 1.1546365022659302, |
|
"learning_rate": 3.345633268640064e-06, |
|
"loss": 0.5164, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.6127080181543114, |
|
"grad_norm": 1.0985122919082642, |
|
"learning_rate": 3.3395722507586413e-06, |
|
"loss": 0.592, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.6172465960665656, |
|
"grad_norm": 0.7895015478134155, |
|
"learning_rate": 3.333505665663162e-06, |
|
"loss": 0.5393, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.62178517397882, |
|
"grad_norm": 0.8720937967300415, |
|
"learning_rate": 3.327433553581227e-06, |
|
"loss": 0.5474, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.626323751891074, |
|
"grad_norm": 0.7074962854385376, |
|
"learning_rate": 3.3213559547770873e-06, |
|
"loss": 0.562, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.6308623298033282, |
|
"grad_norm": 1.0457886457443237, |
|
"learning_rate": 3.3152729095513762e-06, |
|
"loss": 0.5188, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.6354009077155824, |
|
"grad_norm": 1.1377204656600952, |
|
"learning_rate": 3.309184458240843e-06, |
|
"loss": 0.548, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.6399394856278366, |
|
"grad_norm": 1.2311692237854004, |
|
"learning_rate": 3.303090641218083e-06, |
|
"loss": 0.5613, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.644478063540091, |
|
"grad_norm": 0.7034773826599121, |
|
"learning_rate": 3.2969914988912746e-06, |
|
"loss": 0.5469, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.649016641452345, |
|
"grad_norm": 0.9577970504760742, |
|
"learning_rate": 3.290887071703905e-06, |
|
"loss": 0.5377, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.6535552193645993, |
|
"grad_norm": 0.9517496228218079, |
|
"learning_rate": 3.284777400134507e-06, |
|
"loss": 0.5616, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.6580937972768535, |
|
"grad_norm": 0.8925998210906982, |
|
"learning_rate": 3.2786625246963903e-06, |
|
"loss": 0.539, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.6626323751891077, |
|
"grad_norm": 0.7537391185760498, |
|
"learning_rate": 3.272542485937369e-06, |
|
"loss": 0.5388, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.6671709531013614, |
|
"grad_norm": 0.8499334454536438, |
|
"learning_rate": 3.2664173244394965e-06, |
|
"loss": 0.5409, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.6717095310136156, |
|
"grad_norm": 1.3763985633850098, |
|
"learning_rate": 3.2602870808187955e-06, |
|
"loss": 0.5425, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.67624810892587, |
|
"grad_norm": 0.7515591382980347, |
|
"learning_rate": 3.2541517957249868e-06, |
|
"loss": 0.5623, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.680786686838124, |
|
"grad_norm": 0.7507291436195374, |
|
"learning_rate": 3.2480115098412234e-06, |
|
"loss": 0.5335, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.6853252647503782, |
|
"grad_norm": 0.7022225856781006, |
|
"learning_rate": 3.2418662638838166e-06, |
|
"loss": 0.5536, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.6898638426626325, |
|
"grad_norm": 0.7832311987876892, |
|
"learning_rate": 3.2357160986019697e-06, |
|
"loss": 0.5606, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.6944024205748867, |
|
"grad_norm": 1.096700668334961, |
|
"learning_rate": 3.2295610547775054e-06, |
|
"loss": 0.5602, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.6989409984871404, |
|
"grad_norm": 0.837489902973175, |
|
"learning_rate": 3.2234011732245953e-06, |
|
"loss": 0.5241, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.7034795763993946, |
|
"grad_norm": 0.7516577839851379, |
|
"learning_rate": 3.2172364947894914e-06, |
|
"loss": 0.5557, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.708018154311649, |
|
"grad_norm": 1.0745545625686646, |
|
"learning_rate": 3.211067060350253e-06, |
|
"loss": 0.5634, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.712556732223903, |
|
"grad_norm": 1.0247219800949097, |
|
"learning_rate": 3.204892910816476e-06, |
|
"loss": 0.5323, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.7170953101361572, |
|
"grad_norm": 0.7615066766738892, |
|
"learning_rate": 3.198714087129024e-06, |
|
"loss": 0.5474, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.7216338880484114, |
|
"grad_norm": 0.8073952198028564, |
|
"learning_rate": 3.1925306302597535e-06, |
|
"loss": 0.5344, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.7261724659606656, |
|
"grad_norm": 1.2883327007293701, |
|
"learning_rate": 3.1863425812112437e-06, |
|
"loss": 0.5876, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.73071104387292, |
|
"grad_norm": 2.0846781730651855, |
|
"learning_rate": 3.1801499810165254e-06, |
|
"loss": 0.5452, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.735249621785174, |
|
"grad_norm": 0.8865274786949158, |
|
"learning_rate": 3.1739528707388066e-06, |
|
"loss": 0.5386, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.7397881996974283, |
|
"grad_norm": 1.5364603996276855, |
|
"learning_rate": 3.1677512914712044e-06, |
|
"loss": 0.5549, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.7443267776096825, |
|
"grad_norm": 0.7343050837516785, |
|
"learning_rate": 3.1615452843364674e-06, |
|
"loss": 0.5359, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.7488653555219367, |
|
"grad_norm": 0.9987273812294006, |
|
"learning_rate": 3.155334890486707e-06, |
|
"loss": 0.5584, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.753403933434191, |
|
"grad_norm": 1.2430927753448486, |
|
"learning_rate": 3.149120151103121e-06, |
|
"loss": 0.5835, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.7579425113464446, |
|
"grad_norm": 0.901343047618866, |
|
"learning_rate": 3.142901107395724e-06, |
|
"loss": 0.5629, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.762481089258699, |
|
"grad_norm": 1.0777499675750732, |
|
"learning_rate": 3.1366778006030717e-06, |
|
"loss": 0.5638, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.767019667170953, |
|
"grad_norm": 0.6995704174041748, |
|
"learning_rate": 3.130450271991991e-06, |
|
"loss": 0.5628, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.7715582450832073, |
|
"grad_norm": 0.8021388053894043, |
|
"learning_rate": 3.1242185628573e-06, |
|
"loss": 0.5471, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.7760968229954615, |
|
"grad_norm": 1.0073556900024414, |
|
"learning_rate": 3.117982714521541e-06, |
|
"loss": 0.5375, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.7806354009077157, |
|
"grad_norm": 1.3955570459365845, |
|
"learning_rate": 3.1117427683347003e-06, |
|
"loss": 0.5481, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.78517397881997, |
|
"grad_norm": 0.7922578454017639, |
|
"learning_rate": 3.1054987656739395e-06, |
|
"loss": 0.5338, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.789712556732224, |
|
"grad_norm": 0.6501929759979248, |
|
"learning_rate": 3.0992507479433193e-06, |
|
"loss": 0.5076, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.794251134644478, |
|
"grad_norm": 1.006299614906311, |
|
"learning_rate": 3.0929987565735214e-06, |
|
"loss": 0.555, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.798789712556732, |
|
"grad_norm": 0.7578917145729065, |
|
"learning_rate": 3.0867428330215793e-06, |
|
"loss": 0.5359, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.8033282904689862, |
|
"grad_norm": 0.828425943851471, |
|
"learning_rate": 3.0804830187706005e-06, |
|
"loss": 0.5308, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.8078668683812404, |
|
"grad_norm": 0.7022498846054077, |
|
"learning_rate": 3.0742193553294896e-06, |
|
"loss": 0.5861, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.8124054462934946, |
|
"grad_norm": 0.6671915054321289, |
|
"learning_rate": 3.067951884232678e-06, |
|
"loss": 0.5764, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.816944024205749, |
|
"grad_norm": 0.9621557593345642, |
|
"learning_rate": 3.0616806470398453e-06, |
|
"loss": 0.5567, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.821482602118003, |
|
"grad_norm": 0.7543350458145142, |
|
"learning_rate": 3.055405685335643e-06, |
|
"loss": 0.5249, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.8260211800302573, |
|
"grad_norm": 0.8421291708946228, |
|
"learning_rate": 3.0491270407294195e-06, |
|
"loss": 0.5657, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.8305597579425115, |
|
"grad_norm": 0.7241919636726379, |
|
"learning_rate": 3.0428447548549466e-06, |
|
"loss": 0.5679, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.8350983358547657, |
|
"grad_norm": 0.7716313004493713, |
|
"learning_rate": 3.03655886937014e-06, |
|
"loss": 0.562, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.83963691376702, |
|
"grad_norm": 2.4703383445739746, |
|
"learning_rate": 3.030269425956784e-06, |
|
"loss": 0.5356, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.844175491679274, |
|
"grad_norm": 1.180924415588379, |
|
"learning_rate": 3.0239764663202565e-06, |
|
"loss": 0.5619, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.848714069591528, |
|
"grad_norm": 0.7013900876045227, |
|
"learning_rate": 3.017680032189252e-06, |
|
"loss": 0.5415, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.853252647503782, |
|
"grad_norm": 1.6009024381637573, |
|
"learning_rate": 3.011380165315503e-06, |
|
"loss": 0.5618, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.8577912254160363, |
|
"grad_norm": 1.0912448167800903, |
|
"learning_rate": 3.005076907473505e-06, |
|
"loss": 0.5821, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.8623298033282905, |
|
"grad_norm": 0.8813812732696533, |
|
"learning_rate": 2.9987703004602394e-06, |
|
"loss": 0.5405, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.8668683812405447, |
|
"grad_norm": 0.8485713601112366, |
|
"learning_rate": 2.9924603860948963e-06, |
|
"loss": 0.5189, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.871406959152799, |
|
"grad_norm": 0.6998408436775208, |
|
"learning_rate": 2.986147206218597e-06, |
|
"loss": 0.5483, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.875945537065053, |
|
"grad_norm": 0.7725822925567627, |
|
"learning_rate": 2.9798308026941147e-06, |
|
"loss": 0.5553, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.8804841149773073, |
|
"grad_norm": 1.0571022033691406, |
|
"learning_rate": 2.973511217405601e-06, |
|
"loss": 0.5628, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.885022692889561, |
|
"grad_norm": 2.2442429065704346, |
|
"learning_rate": 2.967188492258304e-06, |
|
"loss": 0.567, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.8895612708018152, |
|
"grad_norm": 1.3726474046707153, |
|
"learning_rate": 2.9608626691782927e-06, |
|
"loss": 0.546, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.8940998487140694, |
|
"grad_norm": 0.6917457580566406, |
|
"learning_rate": 2.9545337901121796e-06, |
|
"loss": 0.5322, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.8986384266263236, |
|
"grad_norm": 0.8719446659088135, |
|
"learning_rate": 2.9482018970268395e-06, |
|
"loss": 0.4992, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.903177004538578, |
|
"grad_norm": 0.8265273571014404, |
|
"learning_rate": 2.941867031909136e-06, |
|
"loss": 0.5641, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.907715582450832, |
|
"grad_norm": 0.9295107126235962, |
|
"learning_rate": 2.9355292367656363e-06, |
|
"loss": 0.5269, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.9122541603630863, |
|
"grad_norm": 1.0216796398162842, |
|
"learning_rate": 2.9291885536223415e-06, |
|
"loss": 0.543, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.9167927382753405, |
|
"grad_norm": 0.7285480499267578, |
|
"learning_rate": 2.9228450245243994e-06, |
|
"loss": 0.5021, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.9213313161875947, |
|
"grad_norm": 1.5654610395431519, |
|
"learning_rate": 2.91649869153583e-06, |
|
"loss": 0.5414, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.925869894099849, |
|
"grad_norm": 0.9667910933494568, |
|
"learning_rate": 2.910149596739248e-06, |
|
"loss": 0.5309, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.930408472012103, |
|
"grad_norm": 0.7517403364181519, |
|
"learning_rate": 2.9037977822355783e-06, |
|
"loss": 0.5385, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.9349470499243573, |
|
"grad_norm": 0.9054082036018372, |
|
"learning_rate": 2.8974432901437827e-06, |
|
"loss": 0.5404, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.939485627836611, |
|
"grad_norm": 0.8233144879341125, |
|
"learning_rate": 2.8910861626005774e-06, |
|
"loss": 0.5533, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.9440242057488653, |
|
"grad_norm": 0.7188256978988647, |
|
"learning_rate": 2.884726441760155e-06, |
|
"loss": 0.5448, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.9485627836611195, |
|
"grad_norm": 0.8374635577201843, |
|
"learning_rate": 2.878364169793903e-06, |
|
"loss": 0.5596, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.9531013615733737, |
|
"grad_norm": 1.4161272048950195, |
|
"learning_rate": 2.871999388890126e-06, |
|
"loss": 0.5863, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.957639939485628, |
|
"grad_norm": 1.0405840873718262, |
|
"learning_rate": 2.8656321412537653e-06, |
|
"loss": 0.5196, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.962178517397882, |
|
"grad_norm": 0.9523102641105652, |
|
"learning_rate": 2.85926246910612e-06, |
|
"loss": 0.5287, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.9667170953101363, |
|
"grad_norm": 1.1815980672836304, |
|
"learning_rate": 2.8528904146845652e-06, |
|
"loss": 0.5453, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.9712556732223905, |
|
"grad_norm": 2.321892499923706, |
|
"learning_rate": 2.8465160202422737e-06, |
|
"loss": 0.5703, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.9757942511346442, |
|
"grad_norm": 0.9072638750076294, |
|
"learning_rate": 2.840139328047934e-06, |
|
"loss": 0.5634, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.9803328290468984, |
|
"grad_norm": 0.8406242728233337, |
|
"learning_rate": 2.8337603803854713e-06, |
|
"loss": 0.5464, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.9848714069591527, |
|
"grad_norm": 0.9542201161384583, |
|
"learning_rate": 2.8273792195537663e-06, |
|
"loss": 0.5129, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.989409984871407, |
|
"grad_norm": 0.9052215814590454, |
|
"learning_rate": 2.820995887866378e-06, |
|
"loss": 0.5462, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.993948562783661, |
|
"grad_norm": 1.0007253885269165, |
|
"learning_rate": 2.8146104276512565e-06, |
|
"loss": 0.555, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.9984871406959153, |
|
"grad_norm": 0.9045431613922119, |
|
"learning_rate": 2.8082228812504693e-06, |
|
"loss": 0.5542, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.7561860084533691, |
|
"learning_rate": 2.801833291019915e-06, |
|
"loss": 0.1716, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 3.004538577912254, |
|
"grad_norm": 1.0536010265350342, |
|
"learning_rate": 2.7954416993290474e-06, |
|
"loss": 0.5426, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 3.0090771558245084, |
|
"grad_norm": 1.148224949836731, |
|
"learning_rate": 2.7890481485605898e-06, |
|
"loss": 0.531, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.0136157337367626, |
|
"grad_norm": 1.2489687204360962, |
|
"learning_rate": 2.7826526811102577e-06, |
|
"loss": 0.5283, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 3.018154311649017, |
|
"grad_norm": 0.7956582903862, |
|
"learning_rate": 2.7762553393864743e-06, |
|
"loss": 0.5213, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 3.022692889561271, |
|
"grad_norm": 1.8829700946807861, |
|
"learning_rate": 2.769856165810093e-06, |
|
"loss": 0.5051, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 3.027231467473525, |
|
"grad_norm": 0.6515493392944336, |
|
"learning_rate": 2.7634552028141137e-06, |
|
"loss": 0.5136, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 3.031770045385779, |
|
"grad_norm": 0.8646936416625977, |
|
"learning_rate": 2.757052492843401e-06, |
|
"loss": 0.5044, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.036308623298033, |
|
"grad_norm": 0.7098562717437744, |
|
"learning_rate": 2.750648078354406e-06, |
|
"loss": 0.5072, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 3.0408472012102874, |
|
"grad_norm": 0.7068854570388794, |
|
"learning_rate": 2.7442420018148797e-06, |
|
"loss": 0.4857, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 3.0453857791225416, |
|
"grad_norm": 0.8640517592430115, |
|
"learning_rate": 2.7378343057035956e-06, |
|
"loss": 0.5595, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 3.049924357034796, |
|
"grad_norm": 0.6892681121826172, |
|
"learning_rate": 2.7314250325100667e-06, |
|
"loss": 0.5237, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 3.05446293494705, |
|
"grad_norm": 0.6955535411834717, |
|
"learning_rate": 2.7250142247342637e-06, |
|
"loss": 0.5219, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.059001512859304, |
|
"grad_norm": 0.8527218103408813, |
|
"learning_rate": 2.718601924886332e-06, |
|
"loss": 0.5392, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 3.0635400907715584, |
|
"grad_norm": 0.7058966159820557, |
|
"learning_rate": 2.7121881754863126e-06, |
|
"loss": 0.5035, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 3.068078668683812, |
|
"grad_norm": 1.1376885175704956, |
|
"learning_rate": 2.7057730190638575e-06, |
|
"loss": 0.4946, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 3.0726172465960664, |
|
"grad_norm": 0.8086661100387573, |
|
"learning_rate": 2.699356498157949e-06, |
|
"loss": 0.4892, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 3.0771558245083206, |
|
"grad_norm": 0.6907851696014404, |
|
"learning_rate": 2.6929386553166165e-06, |
|
"loss": 0.5366, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.081694402420575, |
|
"grad_norm": 0.6702793836593628, |
|
"learning_rate": 2.686519533096656e-06, |
|
"loss": 0.497, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 3.086232980332829, |
|
"grad_norm": 1.3654205799102783, |
|
"learning_rate": 2.680099174063348e-06, |
|
"loss": 0.5224, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 3.090771558245083, |
|
"grad_norm": 0.7771999835968018, |
|
"learning_rate": 2.673677620790172e-06, |
|
"loss": 0.5089, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 3.0953101361573374, |
|
"grad_norm": 0.8415461182594299, |
|
"learning_rate": 2.667254915858529e-06, |
|
"loss": 0.5286, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 3.0998487140695916, |
|
"grad_norm": 0.8293631076812744, |
|
"learning_rate": 2.6608311018574545e-06, |
|
"loss": 0.4751, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.104387291981846, |
|
"grad_norm": 1.1603025197982788, |
|
"learning_rate": 2.6544062213833395e-06, |
|
"loss": 0.486, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 3.1089258698941, |
|
"grad_norm": 0.7829399704933167, |
|
"learning_rate": 2.647980317039646e-06, |
|
"loss": 0.5243, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 3.1134644478063542, |
|
"grad_norm": 0.6867077946662903, |
|
"learning_rate": 2.6415534314366264e-06, |
|
"loss": 0.519, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 3.118003025718608, |
|
"grad_norm": 0.8667089939117432, |
|
"learning_rate": 2.635125607191039e-06, |
|
"loss": 0.4992, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 3.122541603630862, |
|
"grad_norm": 0.8637856841087341, |
|
"learning_rate": 2.6286968869258666e-06, |
|
"loss": 0.5129, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.1270801815431164, |
|
"grad_norm": 0.6830906867980957, |
|
"learning_rate": 2.6222673132700335e-06, |
|
"loss": 0.524, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 3.1316187594553706, |
|
"grad_norm": 0.8948183059692383, |
|
"learning_rate": 2.615836928858122e-06, |
|
"loss": 0.5153, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 3.136157337367625, |
|
"grad_norm": 0.7270144820213318, |
|
"learning_rate": 2.609405776330092e-06, |
|
"loss": 0.5076, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 3.140695915279879, |
|
"grad_norm": 0.6997873783111572, |
|
"learning_rate": 2.6029738983309954e-06, |
|
"loss": 0.5008, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 3.145234493192133, |
|
"grad_norm": 0.7153282165527344, |
|
"learning_rate": 2.5965413375106965e-06, |
|
"loss": 0.5356, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.1497730711043874, |
|
"grad_norm": 0.8726050853729248, |
|
"learning_rate": 2.5901081365235852e-06, |
|
"loss": 0.5031, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 3.1543116490166416, |
|
"grad_norm": 0.9993102550506592, |
|
"learning_rate": 2.583674338028298e-06, |
|
"loss": 0.5487, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 3.1588502269288954, |
|
"grad_norm": 1.1097780466079712, |
|
"learning_rate": 2.5772399846874323e-06, |
|
"loss": 0.5031, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 3.1633888048411496, |
|
"grad_norm": 0.876848578453064, |
|
"learning_rate": 2.5708051191672658e-06, |
|
"loss": 0.5504, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 3.167927382753404, |
|
"grad_norm": 0.7725114226341248, |
|
"learning_rate": 2.5643697841374722e-06, |
|
"loss": 0.48, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.172465960665658, |
|
"grad_norm": 0.9931405186653137, |
|
"learning_rate": 2.557934022270837e-06, |
|
"loss": 0.5078, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 3.177004538577912, |
|
"grad_norm": 0.8958150744438171, |
|
"learning_rate": 2.551497876242978e-06, |
|
"loss": 0.5278, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 3.1815431164901664, |
|
"grad_norm": 1.0840779542922974, |
|
"learning_rate": 2.5450613887320606e-06, |
|
"loss": 0.5126, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 3.1860816944024206, |
|
"grad_norm": 0.6236334443092346, |
|
"learning_rate": 2.538624602418513e-06, |
|
"loss": 0.4677, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 3.190620272314675, |
|
"grad_norm": 0.731157660484314, |
|
"learning_rate": 2.5321875599847456e-06, |
|
"loss": 0.5218, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.195158850226929, |
|
"grad_norm": 0.8157845139503479, |
|
"learning_rate": 2.525750304114867e-06, |
|
"loss": 0.5423, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 3.1996974281391832, |
|
"grad_norm": 1.5604270696640015, |
|
"learning_rate": 2.519312877494401e-06, |
|
"loss": 0.4742, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 3.2042360060514374, |
|
"grad_norm": 0.7842020988464355, |
|
"learning_rate": 2.512875322810002e-06, |
|
"loss": 0.5113, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 3.208774583963691, |
|
"grad_norm": 0.8386942744255066, |
|
"learning_rate": 2.5064376827491786e-06, |
|
"loss": 0.4777, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 3.2133131618759454, |
|
"grad_norm": 2.208853244781494, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.5465, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.2178517397881996, |
|
"grad_norm": 0.8791889548301697, |
|
"learning_rate": 2.4935623172508223e-06, |
|
"loss": 0.5285, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 3.222390317700454, |
|
"grad_norm": 0.7482249140739441, |
|
"learning_rate": 2.4871246771899983e-06, |
|
"loss": 0.5327, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 3.226928895612708, |
|
"grad_norm": 0.6846868395805359, |
|
"learning_rate": 2.4806871225056006e-06, |
|
"loss": 0.5253, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 3.231467473524962, |
|
"grad_norm": 0.7808830738067627, |
|
"learning_rate": 2.474249695885134e-06, |
|
"loss": 0.5438, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 3.2360060514372164, |
|
"grad_norm": 0.7752459645271301, |
|
"learning_rate": 2.467812440015255e-06, |
|
"loss": 0.5232, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 3.2405446293494706, |
|
"grad_norm": 0.7170999646186829, |
|
"learning_rate": 2.461375397581487e-06, |
|
"loss": 0.4919, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 3.245083207261725, |
|
"grad_norm": 0.7586008906364441, |
|
"learning_rate": 2.4549386112679394e-06, |
|
"loss": 0.5218, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 3.2496217851739786, |
|
"grad_norm": 0.7302015423774719, |
|
"learning_rate": 2.448502123757022e-06, |
|
"loss": 0.5271, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 3.254160363086233, |
|
"grad_norm": 0.7475630044937134, |
|
"learning_rate": 2.4420659777291637e-06, |
|
"loss": 0.5189, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 3.258698940998487, |
|
"grad_norm": 0.6879778504371643, |
|
"learning_rate": 2.435630215862529e-06, |
|
"loss": 0.5322, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.263237518910741, |
|
"grad_norm": 0.7178695797920227, |
|
"learning_rate": 2.4291948808327346e-06, |
|
"loss": 0.511, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 3.2677760968229954, |
|
"grad_norm": 0.9109921455383301, |
|
"learning_rate": 2.422760015312568e-06, |
|
"loss": 0.4752, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 3.2723146747352496, |
|
"grad_norm": 0.6712206602096558, |
|
"learning_rate": 2.416325661971703e-06, |
|
"loss": 0.5074, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 3.276853252647504, |
|
"grad_norm": 1.5459402799606323, |
|
"learning_rate": 2.4098918634764156e-06, |
|
"loss": 0.502, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 3.281391830559758, |
|
"grad_norm": 0.7668531537055969, |
|
"learning_rate": 2.403458662489304e-06, |
|
"loss": 0.5012, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 3.2859304084720122, |
|
"grad_norm": 0.7114011645317078, |
|
"learning_rate": 2.397026101669005e-06, |
|
"loss": 0.5506, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 3.2904689863842664, |
|
"grad_norm": 0.749311625957489, |
|
"learning_rate": 2.3905942236699086e-06, |
|
"loss": 0.5321, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 3.2950075642965206, |
|
"grad_norm": 0.7871769070625305, |
|
"learning_rate": 2.3841630711418784e-06, |
|
"loss": 0.4805, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 3.2995461422087744, |
|
"grad_norm": 0.7565982937812805, |
|
"learning_rate": 2.377732686729967e-06, |
|
"loss": 0.5431, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 3.3040847201210286, |
|
"grad_norm": 0.8726269006729126, |
|
"learning_rate": 2.371303113074134e-06, |
|
"loss": 0.5153, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.308623298033283, |
|
"grad_norm": 0.7536921501159668, |
|
"learning_rate": 2.3648743928089612e-06, |
|
"loss": 0.4863, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 3.313161875945537, |
|
"grad_norm": 0.796808660030365, |
|
"learning_rate": 2.358446568563374e-06, |
|
"loss": 0.4908, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 3.317700453857791, |
|
"grad_norm": 0.7959126830101013, |
|
"learning_rate": 2.3520196829603547e-06, |
|
"loss": 0.5232, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 3.3222390317700454, |
|
"grad_norm": 1.1234790086746216, |
|
"learning_rate": 2.3455937786166613e-06, |
|
"loss": 0.5304, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 3.3267776096822996, |
|
"grad_norm": 0.687144935131073, |
|
"learning_rate": 2.3391688981425464e-06, |
|
"loss": 0.5027, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 3.331316187594554, |
|
"grad_norm": 0.7692601680755615, |
|
"learning_rate": 2.3327450841414716e-06, |
|
"loss": 0.5021, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 3.335854765506808, |
|
"grad_norm": 0.7073154449462891, |
|
"learning_rate": 2.3263223792098287e-06, |
|
"loss": 0.536, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 3.340393343419062, |
|
"grad_norm": 0.9052091240882874, |
|
"learning_rate": 2.3199008259366524e-06, |
|
"loss": 0.5473, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 3.344931921331316, |
|
"grad_norm": 0.8449652791023254, |
|
"learning_rate": 2.3134804669033437e-06, |
|
"loss": 0.5151, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 3.34947049924357, |
|
"grad_norm": 0.7386903166770935, |
|
"learning_rate": 2.3070613446833843e-06, |
|
"loss": 0.5218, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.3540090771558244, |
|
"grad_norm": 1.0674140453338623, |
|
"learning_rate": 2.300643501842052e-06, |
|
"loss": 0.4955, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 3.3585476550680786, |
|
"grad_norm": 0.6917946338653564, |
|
"learning_rate": 2.294226980936143e-06, |
|
"loss": 0.5357, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 3.363086232980333, |
|
"grad_norm": 0.735014021396637, |
|
"learning_rate": 2.287811824513688e-06, |
|
"loss": 0.5045, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 3.367624810892587, |
|
"grad_norm": 0.9542885422706604, |
|
"learning_rate": 2.2813980751136686e-06, |
|
"loss": 0.52, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 3.3721633888048412, |
|
"grad_norm": 0.8511250019073486, |
|
"learning_rate": 2.274985775265737e-06, |
|
"loss": 0.5107, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.3767019667170954, |
|
"grad_norm": 0.7510440945625305, |
|
"learning_rate": 2.2685749674899346e-06, |
|
"loss": 0.4872, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 3.3812405446293496, |
|
"grad_norm": 0.7399076819419861, |
|
"learning_rate": 2.262165694296406e-06, |
|
"loss": 0.5187, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 3.385779122541604, |
|
"grad_norm": 0.6950759887695312, |
|
"learning_rate": 2.255757998185122e-06, |
|
"loss": 0.5205, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 3.3903177004538576, |
|
"grad_norm": 0.736414909362793, |
|
"learning_rate": 2.2493519216455945e-06, |
|
"loss": 0.5092, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 3.394856278366112, |
|
"grad_norm": 1.004551649093628, |
|
"learning_rate": 2.242947507156599e-06, |
|
"loss": 0.5326, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.399394856278366, |
|
"grad_norm": 3.4429965019226074, |
|
"learning_rate": 2.2365447971858868e-06, |
|
"loss": 0.547, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 3.40393343419062, |
|
"grad_norm": 0.672024130821228, |
|
"learning_rate": 2.2301438341899073e-06, |
|
"loss": 0.4979, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 3.4084720121028744, |
|
"grad_norm": 0.8082073926925659, |
|
"learning_rate": 2.223744660613526e-06, |
|
"loss": 0.5099, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 3.4130105900151286, |
|
"grad_norm": 0.7335776090621948, |
|
"learning_rate": 2.217347318889743e-06, |
|
"loss": 0.5196, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 3.417549167927383, |
|
"grad_norm": 0.791305422782898, |
|
"learning_rate": 2.210951851439411e-06, |
|
"loss": 0.5072, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 3.422087745839637, |
|
"grad_norm": 0.8634244203567505, |
|
"learning_rate": 2.204558300670954e-06, |
|
"loss": 0.5095, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 3.4266263237518912, |
|
"grad_norm": 0.910862147808075, |
|
"learning_rate": 2.198166708980086e-06, |
|
"loss": 0.5334, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 3.431164901664145, |
|
"grad_norm": 0.685192346572876, |
|
"learning_rate": 2.191777118749532e-06, |
|
"loss": 0.5061, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 3.435703479576399, |
|
"grad_norm": 0.7736103534698486, |
|
"learning_rate": 2.185389572348745e-06, |
|
"loss": 0.5209, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 3.4402420574886534, |
|
"grad_norm": 2.14925217628479, |
|
"learning_rate": 2.1790041121336223e-06, |
|
"loss": 0.5325, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.4447806354009076, |
|
"grad_norm": 0.7848958969116211, |
|
"learning_rate": 2.1726207804462336e-06, |
|
"loss": 0.5217, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 3.449319213313162, |
|
"grad_norm": 0.8946971893310547, |
|
"learning_rate": 2.1662396196145295e-06, |
|
"loss": 0.5028, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 3.453857791225416, |
|
"grad_norm": 0.7946920394897461, |
|
"learning_rate": 2.1598606719520663e-06, |
|
"loss": 0.4991, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 3.4583963691376702, |
|
"grad_norm": 0.707175076007843, |
|
"learning_rate": 2.153483979757727e-06, |
|
"loss": 0.5172, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 3.4629349470499244, |
|
"grad_norm": 0.8119880557060242, |
|
"learning_rate": 2.147109585315435e-06, |
|
"loss": 0.4985, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 3.4674735249621786, |
|
"grad_norm": 0.8942325711250305, |
|
"learning_rate": 2.1407375308938807e-06, |
|
"loss": 0.4909, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 3.472012102874433, |
|
"grad_norm": 0.9520535469055176, |
|
"learning_rate": 2.134367858746236e-06, |
|
"loss": 0.5228, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 3.476550680786687, |
|
"grad_norm": 0.9031659364700317, |
|
"learning_rate": 2.1280006111098754e-06, |
|
"loss": 0.5083, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 3.481089258698941, |
|
"grad_norm": 0.9307310581207275, |
|
"learning_rate": 2.1216358302060987e-06, |
|
"loss": 0.5067, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 3.485627836611195, |
|
"grad_norm": 0.7787923812866211, |
|
"learning_rate": 2.1152735582398453e-06, |
|
"loss": 0.4929, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.4901664145234492, |
|
"grad_norm": 0.7476038932800293, |
|
"learning_rate": 2.1089138373994226e-06, |
|
"loss": 0.5082, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 3.4947049924357034, |
|
"grad_norm": 1.7294312715530396, |
|
"learning_rate": 2.1025567098562177e-06, |
|
"loss": 0.511, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 3.4992435703479576, |
|
"grad_norm": 1.9948171377182007, |
|
"learning_rate": 2.096202217764422e-06, |
|
"loss": 0.5264, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 3.503782148260212, |
|
"grad_norm": 0.7554813623428345, |
|
"learning_rate": 2.089850403260753e-06, |
|
"loss": 0.4951, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 3.508320726172466, |
|
"grad_norm": 0.8657441139221191, |
|
"learning_rate": 2.0835013084641704e-06, |
|
"loss": 0.4973, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 3.5128593040847202, |
|
"grad_norm": 1.0366365909576416, |
|
"learning_rate": 2.0771549754756014e-06, |
|
"loss": 0.5198, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 3.517397881996974, |
|
"grad_norm": 0.9612494707107544, |
|
"learning_rate": 2.070811446377659e-06, |
|
"loss": 0.4916, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 3.521936459909228, |
|
"grad_norm": 0.7001491785049438, |
|
"learning_rate": 2.064470763234364e-06, |
|
"loss": 0.5143, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 3.5264750378214824, |
|
"grad_norm": 0.7265450954437256, |
|
"learning_rate": 2.0581329680908654e-06, |
|
"loss": 0.4852, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 3.5310136157337366, |
|
"grad_norm": 0.7637912631034851, |
|
"learning_rate": 2.0517981029731613e-06, |
|
"loss": 0.5101, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.535552193645991, |
|
"grad_norm": 0.7350311279296875, |
|
"learning_rate": 2.045466209887821e-06, |
|
"loss": 0.4884, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 3.540090771558245, |
|
"grad_norm": 2.7524001598358154, |
|
"learning_rate": 2.0391373308217077e-06, |
|
"loss": 0.5007, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 3.5446293494704992, |
|
"grad_norm": 0.773992657661438, |
|
"learning_rate": 2.032811507741697e-06, |
|
"loss": 0.5144, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 3.5491679273827534, |
|
"grad_norm": 0.7631218433380127, |
|
"learning_rate": 2.0264887825944e-06, |
|
"loss": 0.5249, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 3.5537065052950076, |
|
"grad_norm": 0.8911067843437195, |
|
"learning_rate": 2.020169197305886e-06, |
|
"loss": 0.5218, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 3.558245083207262, |
|
"grad_norm": 0.6976955533027649, |
|
"learning_rate": 2.013852793781404e-06, |
|
"loss": 0.4844, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 3.562783661119516, |
|
"grad_norm": 0.768477201461792, |
|
"learning_rate": 2.007539613905104e-06, |
|
"loss": 0.5026, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 3.5673222390317703, |
|
"grad_norm": 0.6874233484268188, |
|
"learning_rate": 2.0012296995397614e-06, |
|
"loss": 0.5416, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 3.5718608169440245, |
|
"grad_norm": 0.9146207571029663, |
|
"learning_rate": 1.9949230925264963e-06, |
|
"loss": 0.5285, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 3.5763993948562782, |
|
"grad_norm": 1.090366244316101, |
|
"learning_rate": 1.988619834684499e-06, |
|
"loss": 0.5133, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.5809379727685324, |
|
"grad_norm": 0.9968334436416626, |
|
"learning_rate": 1.982319967810749e-06, |
|
"loss": 0.4654, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 3.5854765506807866, |
|
"grad_norm": 0.7116790413856506, |
|
"learning_rate": 1.976023533679744e-06, |
|
"loss": 0.5077, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 3.590015128593041, |
|
"grad_norm": 1.0453625917434692, |
|
"learning_rate": 1.969730574043217e-06, |
|
"loss": 0.5124, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 3.594553706505295, |
|
"grad_norm": 0.7500141859054565, |
|
"learning_rate": 1.9634411306298614e-06, |
|
"loss": 0.4981, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 3.5990922844175492, |
|
"grad_norm": 0.9187857508659363, |
|
"learning_rate": 1.9571552451450542e-06, |
|
"loss": 0.5318, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 3.6036308623298035, |
|
"grad_norm": 0.7069258689880371, |
|
"learning_rate": 1.950872959270581e-06, |
|
"loss": 0.4844, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 3.608169440242057, |
|
"grad_norm": 0.8754012584686279, |
|
"learning_rate": 1.944594314664358e-06, |
|
"loss": 0.4946, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 3.6127080181543114, |
|
"grad_norm": 0.8324469327926636, |
|
"learning_rate": 1.938319352960156e-06, |
|
"loss": 0.5172, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 3.6172465960665656, |
|
"grad_norm": 0.7333508729934692, |
|
"learning_rate": 1.9320481157673225e-06, |
|
"loss": 0.5157, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 3.62178517397882, |
|
"grad_norm": 0.7321659326553345, |
|
"learning_rate": 1.9257806446705116e-06, |
|
"loss": 0.5297, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.626323751891074, |
|
"grad_norm": 0.7912946343421936, |
|
"learning_rate": 1.919516981229401e-06, |
|
"loss": 0.5017, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 3.6308623298033282, |
|
"grad_norm": 0.7116424441337585, |
|
"learning_rate": 1.9132571669784215e-06, |
|
"loss": 0.502, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 3.6354009077155824, |
|
"grad_norm": 0.6774429678916931, |
|
"learning_rate": 1.9070012434264793e-06, |
|
"loss": 0.547, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 3.6399394856278366, |
|
"grad_norm": 0.9747133851051331, |
|
"learning_rate": 1.9007492520566813e-06, |
|
"loss": 0.512, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 3.644478063540091, |
|
"grad_norm": 0.6881934404373169, |
|
"learning_rate": 1.8945012343260605e-06, |
|
"loss": 0.5357, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 3.649016641452345, |
|
"grad_norm": 0.7221348881721497, |
|
"learning_rate": 1.8882572316653003e-06, |
|
"loss": 0.5118, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 3.6535552193645993, |
|
"grad_norm": 0.7595603466033936, |
|
"learning_rate": 1.88201728547846e-06, |
|
"loss": 0.5288, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 3.6580937972768535, |
|
"grad_norm": 0.9129202961921692, |
|
"learning_rate": 1.8757814371427003e-06, |
|
"loss": 0.4976, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 3.6626323751891077, |
|
"grad_norm": 1.1038625240325928, |
|
"learning_rate": 1.8695497280080094e-06, |
|
"loss": 0.5428, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 3.6671709531013614, |
|
"grad_norm": 0.9567497372627258, |
|
"learning_rate": 1.8633221993969285e-06, |
|
"loss": 0.498, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 3.6717095310136156, |
|
"grad_norm": 0.7735843062400818, |
|
"learning_rate": 1.857098892604277e-06, |
|
"loss": 0.5222, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 3.67624810892587, |
|
"grad_norm": 0.9261084794998169, |
|
"learning_rate": 1.8508798488968805e-06, |
|
"loss": 0.4969, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 3.680786686838124, |
|
"grad_norm": 0.7963117361068726, |
|
"learning_rate": 1.844665109513294e-06, |
|
"loss": 0.5127, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 3.6853252647503782, |
|
"grad_norm": 0.9042128920555115, |
|
"learning_rate": 1.8384547156635324e-06, |
|
"loss": 0.5214, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 3.6898638426626325, |
|
"grad_norm": 0.9819813370704651, |
|
"learning_rate": 1.8322487085287953e-06, |
|
"loss": 0.5208, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 3.6944024205748867, |
|
"grad_norm": 0.7022226452827454, |
|
"learning_rate": 1.8260471292611936e-06, |
|
"loss": 0.5422, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 3.6989409984871404, |
|
"grad_norm": 0.7549095749855042, |
|
"learning_rate": 1.8198500189834757e-06, |
|
"loss": 0.5197, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 3.7034795763993946, |
|
"grad_norm": 1.0044478178024292, |
|
"learning_rate": 1.813657418788757e-06, |
|
"loss": 0.5134, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 3.708018154311649, |
|
"grad_norm": 0.7384189963340759, |
|
"learning_rate": 1.8074693697402473e-06, |
|
"loss": 0.5367, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 3.712556732223903, |
|
"grad_norm": 0.7035711407661438, |
|
"learning_rate": 1.8012859128709766e-06, |
|
"loss": 0.509, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 3.7170953101361572, |
|
"grad_norm": 4.242894649505615, |
|
"learning_rate": 1.7951070891835245e-06, |
|
"loss": 0.4983, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 3.7216338880484114, |
|
"grad_norm": 0.7296944856643677, |
|
"learning_rate": 1.7889329396497478e-06, |
|
"loss": 0.5133, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 3.7261724659606656, |
|
"grad_norm": 0.6937296390533447, |
|
"learning_rate": 1.7827635052105095e-06, |
|
"loss": 0.5226, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 3.73071104387292, |
|
"grad_norm": 0.6841968894004822, |
|
"learning_rate": 1.7765988267754053e-06, |
|
"loss": 0.5287, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 3.735249621785174, |
|
"grad_norm": 0.7006022930145264, |
|
"learning_rate": 1.7704389452224945e-06, |
|
"loss": 0.5037, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 3.7397881996974283, |
|
"grad_norm": 1.5364707708358765, |
|
"learning_rate": 1.7642839013980305e-06, |
|
"loss": 0.4777, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 3.7443267776096825, |
|
"grad_norm": 0.7404047846794128, |
|
"learning_rate": 1.7581337361161838e-06, |
|
"loss": 0.5111, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 3.7488653555219367, |
|
"grad_norm": 0.6842325925827026, |
|
"learning_rate": 1.7519884901587773e-06, |
|
"loss": 0.5349, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 3.753403933434191, |
|
"grad_norm": 1.1200604438781738, |
|
"learning_rate": 1.7458482042750138e-06, |
|
"loss": 0.4894, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 3.7579425113464446, |
|
"grad_norm": 0.6718960404396057, |
|
"learning_rate": 1.7397129191812058e-06, |
|
"loss": 0.4482, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 3.762481089258699, |
|
"grad_norm": 0.7164394855499268, |
|
"learning_rate": 1.7335826755605043e-06, |
|
"loss": 0.5047, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 3.767019667170953, |
|
"grad_norm": 0.8456952571868896, |
|
"learning_rate": 1.7274575140626318e-06, |
|
"loss": 0.4874, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 3.7715582450832073, |
|
"grad_norm": 0.8413048386573792, |
|
"learning_rate": 1.7213374753036105e-06, |
|
"loss": 0.52, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 3.7760968229954615, |
|
"grad_norm": 0.7125583291053772, |
|
"learning_rate": 1.7152225998654934e-06, |
|
"loss": 0.4917, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 3.7806354009077157, |
|
"grad_norm": 0.7068613767623901, |
|
"learning_rate": 1.7091129282960966e-06, |
|
"loss": 0.5111, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 3.78517397881997, |
|
"grad_norm": 0.7550258636474609, |
|
"learning_rate": 1.703008501108726e-06, |
|
"loss": 0.5303, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 3.789712556732224, |
|
"grad_norm": 0.7415627241134644, |
|
"learning_rate": 1.696909358781917e-06, |
|
"loss": 0.5094, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 3.794251134644478, |
|
"grad_norm": 0.6911723017692566, |
|
"learning_rate": 1.6908155417591576e-06, |
|
"loss": 0.4979, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 3.798789712556732, |
|
"grad_norm": 0.7296070456504822, |
|
"learning_rate": 1.684727090448624e-06, |
|
"loss": 0.5099, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 3.8033282904689862, |
|
"grad_norm": 0.7332204580307007, |
|
"learning_rate": 1.6786440452229134e-06, |
|
"loss": 0.5198, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 3.8078668683812404, |
|
"grad_norm": 0.8778614401817322, |
|
"learning_rate": 1.6725664464187734e-06, |
|
"loss": 0.4959, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 3.8124054462934946, |
|
"grad_norm": 0.7139490246772766, |
|
"learning_rate": 1.6664943343368386e-06, |
|
"loss": 0.4968, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 3.816944024205749, |
|
"grad_norm": 0.7621096968650818, |
|
"learning_rate": 1.660427749241359e-06, |
|
"loss": 0.4932, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 3.821482602118003, |
|
"grad_norm": 0.6902374625205994, |
|
"learning_rate": 1.6543667313599366e-06, |
|
"loss": 0.5035, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 3.8260211800302573, |
|
"grad_norm": 0.6864549517631531, |
|
"learning_rate": 1.6483113208832562e-06, |
|
"loss": 0.5058, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 3.8305597579425115, |
|
"grad_norm": 0.7007944583892822, |
|
"learning_rate": 1.6422615579648202e-06, |
|
"loss": 0.5303, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 3.8350983358547657, |
|
"grad_norm": 5.041836738586426, |
|
"learning_rate": 1.6362174827206806e-06, |
|
"loss": 0.5142, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 3.83963691376702, |
|
"grad_norm": 0.7452664971351624, |
|
"learning_rate": 1.6301791352291774e-06, |
|
"loss": 0.4758, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 3.844175491679274, |
|
"grad_norm": 0.777967095375061, |
|
"learning_rate": 1.6241465555306679e-06, |
|
"loss": 0.5047, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 3.848714069591528, |
|
"grad_norm": 0.723528265953064, |
|
"learning_rate": 1.618119783627263e-06, |
|
"loss": 0.5263, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 3.853252647503782, |
|
"grad_norm": 0.6719141006469727, |
|
"learning_rate": 1.612098859482562e-06, |
|
"loss": 0.5024, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 3.8577912254160363, |
|
"grad_norm": 0.7341691255569458, |
|
"learning_rate": 1.6060838230213883e-06, |
|
"loss": 0.5148, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 3.8623298033282905, |
|
"grad_norm": 0.7549681663513184, |
|
"learning_rate": 1.6000747141295233e-06, |
|
"loss": 0.5314, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 3.8668683812405447, |
|
"grad_norm": 0.7185303568840027, |
|
"learning_rate": 1.594071572653444e-06, |
|
"loss": 0.5095, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 3.871406959152799, |
|
"grad_norm": 0.7704632878303528, |
|
"learning_rate": 1.5880744384000544e-06, |
|
"loss": 0.4827, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 3.875945537065053, |
|
"grad_norm": 0.7089985609054565, |
|
"learning_rate": 1.5820833511364275e-06, |
|
"loss": 0.5108, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 3.8804841149773073, |
|
"grad_norm": 0.7662360072135925, |
|
"learning_rate": 1.5760983505895377e-06, |
|
"loss": 0.5123, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 3.885022692889561, |
|
"grad_norm": 0.8514230847358704, |
|
"learning_rate": 1.570119476445997e-06, |
|
"loss": 0.5043, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 3.8895612708018152, |
|
"grad_norm": 0.6921412348747253, |
|
"learning_rate": 1.5641467683517967e-06, |
|
"loss": 0.4982, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 3.8940998487140694, |
|
"grad_norm": 0.6514143347740173, |
|
"learning_rate": 1.558180265912037e-06, |
|
"loss": 0.5285, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 3.8986384266263236, |
|
"grad_norm": 1.3229821920394897, |
|
"learning_rate": 1.5522200086906708e-06, |
|
"loss": 0.5135, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 3.903177004538578, |
|
"grad_norm": 0.6929940581321716, |
|
"learning_rate": 1.5462660362102371e-06, |
|
"loss": 0.5027, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 3.907715582450832, |
|
"grad_norm": 0.8205671906471252, |
|
"learning_rate": 1.5403183879516025e-06, |
|
"loss": 0.5031, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 3.9122541603630863, |
|
"grad_norm": 0.7823313474655151, |
|
"learning_rate": 1.534377103353696e-06, |
|
"loss": 0.5212, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 3.9167927382753405, |
|
"grad_norm": 1.095670223236084, |
|
"learning_rate": 1.5284422218132495e-06, |
|
"loss": 0.4762, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 3.9213313161875947, |
|
"grad_norm": 0.7315182685852051, |
|
"learning_rate": 1.5225137826845371e-06, |
|
"loss": 0.5012, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 3.925869894099849, |
|
"grad_norm": 1.2629145383834839, |
|
"learning_rate": 1.5165918252791125e-06, |
|
"loss": 0.488, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 3.930408472012103, |
|
"grad_norm": 0.8126862049102783, |
|
"learning_rate": 1.510676388865548e-06, |
|
"loss": 0.4999, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 3.9349470499243573, |
|
"grad_norm": 0.7050077319145203, |
|
"learning_rate": 1.5047675126691783e-06, |
|
"loss": 0.4863, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 3.939485627836611, |
|
"grad_norm": 0.8243216276168823, |
|
"learning_rate": 1.4988652358718336e-06, |
|
"loss": 0.5145, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 3.9440242057488653, |
|
"grad_norm": 0.9307572245597839, |
|
"learning_rate": 1.4929695976115854e-06, |
|
"loss": 0.5001, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 3.9485627836611195, |
|
"grad_norm": 0.6967872381210327, |
|
"learning_rate": 1.4870806369824847e-06, |
|
"loss": 0.5402, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 3.9531013615733737, |
|
"grad_norm": 0.7018783092498779, |
|
"learning_rate": 1.4811983930343018e-06, |
|
"loss": 0.5262, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 3.957639939485628, |
|
"grad_norm": 0.6808320879936218, |
|
"learning_rate": 1.4753229047722704e-06, |
|
"loss": 0.5046, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 3.962178517397882, |
|
"grad_norm": 1.0063635110855103, |
|
"learning_rate": 1.4694542111568261e-06, |
|
"loss": 0.5292, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 3.9667170953101363, |
|
"grad_norm": 0.7578749060630798, |
|
"learning_rate": 1.4635923511033496e-06, |
|
"loss": 0.4964, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 3.9712556732223905, |
|
"grad_norm": 0.7392986416816711, |
|
"learning_rate": 1.4577373634819075e-06, |
|
"loss": 0.5081, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 3.9757942511346442, |
|
"grad_norm": 0.737310528755188, |
|
"learning_rate": 1.451889287116996e-06, |
|
"loss": 0.5507, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 3.9803328290468984, |
|
"grad_norm": 1.102256417274475, |
|
"learning_rate": 1.446048160787282e-06, |
|
"loss": 0.5041, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 3.9848714069591527, |
|
"grad_norm": 0.790909469127655, |
|
"learning_rate": 1.4402140232253486e-06, |
|
"loss": 0.5064, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 3.989409984871407, |
|
"grad_norm": 0.6738175749778748, |
|
"learning_rate": 1.4343869131174323e-06, |
|
"loss": 0.4915, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 3.993948562783661, |
|
"grad_norm": 1.0193603038787842, |
|
"learning_rate": 1.4285668691031751e-06, |
|
"loss": 0.5251, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 3.9984871406959153, |
|
"grad_norm": 0.7585704922676086, |
|
"learning_rate": 1.422753929775359e-06, |
|
"loss": 0.5448, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.7585704922676086, |
|
"learning_rate": 1.4169481336796598e-06, |
|
"loss": 0.175, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 4.004538577912254, |
|
"grad_norm": 0.6884925365447998, |
|
"learning_rate": 1.411149519314381e-06, |
|
"loss": 0.4632, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 4.009077155824508, |
|
"grad_norm": 0.7509837746620178, |
|
"learning_rate": 1.405358125130209e-06, |
|
"loss": 0.54, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 4.013615733736763, |
|
"grad_norm": 0.6542647480964661, |
|
"learning_rate": 1.399573989529949e-06, |
|
"loss": 0.4786, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 4.018154311649017, |
|
"grad_norm": 0.8852865099906921, |
|
"learning_rate": 1.393797150868278e-06, |
|
"loss": 0.4871, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 4.022692889561271, |
|
"grad_norm": 0.6787682771682739, |
|
"learning_rate": 1.3880276474514841e-06, |
|
"loss": 0.5156, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 4.027231467473525, |
|
"grad_norm": 1.1594585180282593, |
|
"learning_rate": 1.3822655175372148e-06, |
|
"loss": 0.4806, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.031770045385779, |
|
"grad_norm": 1.0756573677062988, |
|
"learning_rate": 1.3765107993342292e-06, |
|
"loss": 0.4593, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 4.036308623298034, |
|
"grad_norm": 0.8092817068099976, |
|
"learning_rate": 1.370763531002132e-06, |
|
"loss": 0.5245, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 4.040847201210288, |
|
"grad_norm": 0.6932277679443359, |
|
"learning_rate": 1.3650237506511333e-06, |
|
"loss": 0.4957, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 4.045385779122542, |
|
"grad_norm": 0.7002460360527039, |
|
"learning_rate": 1.3592914963417864e-06, |
|
"loss": 0.4924, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 4.049924357034795, |
|
"grad_norm": 0.7570782899856567, |
|
"learning_rate": 1.3535668060847428e-06, |
|
"loss": 0.4914, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 4.05446293494705, |
|
"grad_norm": 0.8757203817367554, |
|
"learning_rate": 1.347849717840493e-06, |
|
"loss": 0.4971, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 4.059001512859304, |
|
"grad_norm": 0.7901989817619324, |
|
"learning_rate": 1.3421402695191227e-06, |
|
"loss": 0.5, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 4.063540090771558, |
|
"grad_norm": 0.7769845724105835, |
|
"learning_rate": 1.3364384989800522e-06, |
|
"loss": 0.4795, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 4.068078668683812, |
|
"grad_norm": 0.78565514087677, |
|
"learning_rate": 1.3307444440317956e-06, |
|
"loss": 0.4727, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 4.072617246596066, |
|
"grad_norm": 0.8624851703643799, |
|
"learning_rate": 1.3250581424317012e-06, |
|
"loss": 0.5184, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.077155824508321, |
|
"grad_norm": 0.7443495988845825, |
|
"learning_rate": 1.3193796318857031e-06, |
|
"loss": 0.474, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 4.081694402420575, |
|
"grad_norm": 0.8881354331970215, |
|
"learning_rate": 1.3137089500480802e-06, |
|
"loss": 0.4846, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 4.086232980332829, |
|
"grad_norm": 0.6768248081207275, |
|
"learning_rate": 1.3080461345211909e-06, |
|
"loss": 0.4898, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 4.090771558245083, |
|
"grad_norm": 0.7537845969200134, |
|
"learning_rate": 1.3023912228552383e-06, |
|
"loss": 0.4985, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 4.095310136157337, |
|
"grad_norm": 0.8789769411087036, |
|
"learning_rate": 1.2967442525480092e-06, |
|
"loss": 0.4818, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 4.099848714069592, |
|
"grad_norm": 0.8388983011245728, |
|
"learning_rate": 1.2911052610446367e-06, |
|
"loss": 0.495, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 4.104387291981846, |
|
"grad_norm": 0.7456345558166504, |
|
"learning_rate": 1.2854742857373413e-06, |
|
"loss": 0.4672, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 4.1089258698941, |
|
"grad_norm": 0.6598865985870361, |
|
"learning_rate": 1.279851363965193e-06, |
|
"loss": 0.467, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 4.113464447806354, |
|
"grad_norm": 0.7831140160560608, |
|
"learning_rate": 1.2742365330138546e-06, |
|
"loss": 0.4881, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 4.118003025718608, |
|
"grad_norm": 0.8532465696334839, |
|
"learning_rate": 1.2686298301153394e-06, |
|
"loss": 0.5227, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.122541603630863, |
|
"grad_norm": 0.8814959526062012, |
|
"learning_rate": 1.2630312924477662e-06, |
|
"loss": 0.4924, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 4.127080181543117, |
|
"grad_norm": 0.8461480140686035, |
|
"learning_rate": 1.2574409571351048e-06, |
|
"loss": 0.5074, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 4.131618759455371, |
|
"grad_norm": 0.8556418418884277, |
|
"learning_rate": 1.2518588612469423e-06, |
|
"loss": 0.4854, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 4.136157337367624, |
|
"grad_norm": 1.0551602840423584, |
|
"learning_rate": 1.2462850417982234e-06, |
|
"loss": 0.4865, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 4.140695915279879, |
|
"grad_norm": 1.2184538841247559, |
|
"learning_rate": 1.2407195357490163e-06, |
|
"loss": 0.4871, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 4.145234493192133, |
|
"grad_norm": 0.6897459030151367, |
|
"learning_rate": 1.2351623800042586e-06, |
|
"loss": 0.5006, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 4.149773071104387, |
|
"grad_norm": 0.8268042802810669, |
|
"learning_rate": 1.2296136114135215e-06, |
|
"loss": 0.4919, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 4.154311649016641, |
|
"grad_norm": 0.9411110281944275, |
|
"learning_rate": 1.2240732667707581e-06, |
|
"loss": 0.485, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 4.158850226928895, |
|
"grad_norm": 0.9568235278129578, |
|
"learning_rate": 1.218541382814061e-06, |
|
"loss": 0.4916, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 4.16338880484115, |
|
"grad_norm": 1.0960578918457031, |
|
"learning_rate": 1.213017996225424e-06, |
|
"loss": 0.5029, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.167927382753404, |
|
"grad_norm": 0.7468828558921814, |
|
"learning_rate": 1.2075031436304906e-06, |
|
"loss": 0.4882, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 4.172465960665658, |
|
"grad_norm": 0.9359505772590637, |
|
"learning_rate": 1.2019968615983187e-06, |
|
"loss": 0.4869, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 4.177004538577912, |
|
"grad_norm": 0.7222769260406494, |
|
"learning_rate": 1.1964991866411304e-06, |
|
"loss": 0.4573, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 4.181543116490166, |
|
"grad_norm": 0.7045331597328186, |
|
"learning_rate": 1.1910101552140808e-06, |
|
"loss": 0.5192, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 4.186081694402421, |
|
"grad_norm": 0.8232183456420898, |
|
"learning_rate": 1.1855298037150022e-06, |
|
"loss": 0.5022, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 4.190620272314675, |
|
"grad_norm": 0.7209495902061462, |
|
"learning_rate": 1.1800581684841765e-06, |
|
"loss": 0.4956, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 4.195158850226929, |
|
"grad_norm": 0.838749349117279, |
|
"learning_rate": 1.1745952858040834e-06, |
|
"loss": 0.4971, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 4.199697428139183, |
|
"grad_norm": 0.6986132264137268, |
|
"learning_rate": 1.1691411918991657e-06, |
|
"loss": 0.4914, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 4.204236006051437, |
|
"grad_norm": 0.7277268767356873, |
|
"learning_rate": 1.1636959229355894e-06, |
|
"loss": 0.4645, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 4.208774583963692, |
|
"grad_norm": 0.7252954244613647, |
|
"learning_rate": 1.158259515020999e-06, |
|
"loss": 0.5153, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.213313161875946, |
|
"grad_norm": 0.6610934734344482, |
|
"learning_rate": 1.1528320042042853e-06, |
|
"loss": 0.4828, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 4.2178517397882, |
|
"grad_norm": 1.002673864364624, |
|
"learning_rate": 1.1474134264753384e-06, |
|
"loss": 0.4962, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 4.222390317700454, |
|
"grad_norm": 0.7143538594245911, |
|
"learning_rate": 1.142003817764816e-06, |
|
"loss": 0.4798, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 4.2269288956127085, |
|
"grad_norm": 0.8639364242553711, |
|
"learning_rate": 1.1366032139438987e-06, |
|
"loss": 0.4641, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 4.231467473524962, |
|
"grad_norm": 0.8112154603004456, |
|
"learning_rate": 1.1312116508240612e-06, |
|
"loss": 0.4971, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 4.236006051437216, |
|
"grad_norm": 0.7791656851768494, |
|
"learning_rate": 1.1258291641568237e-06, |
|
"loss": 0.4963, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 4.24054462934947, |
|
"grad_norm": 0.849223256111145, |
|
"learning_rate": 1.1204557896335217e-06, |
|
"loss": 0.4826, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 4.245083207261724, |
|
"grad_norm": 0.6998844146728516, |
|
"learning_rate": 1.1150915628850702e-06, |
|
"loss": 0.4996, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 4.249621785173979, |
|
"grad_norm": 0.7256484627723694, |
|
"learning_rate": 1.1097365194817222e-06, |
|
"loss": 0.4219, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 4.254160363086233, |
|
"grad_norm": 0.7923110723495483, |
|
"learning_rate": 1.1043906949328387e-06, |
|
"loss": 0.4731, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 4.258698940998487, |
|
"grad_norm": 0.9490653872489929, |
|
"learning_rate": 1.0990541246866473e-06, |
|
"loss": 0.4884, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 4.263237518910741, |
|
"grad_norm": 1.0265283584594727, |
|
"learning_rate": 1.0937268441300136e-06, |
|
"loss": 0.4991, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 4.267776096822995, |
|
"grad_norm": 0.938533365726471, |
|
"learning_rate": 1.088408888588199e-06, |
|
"loss": 0.4748, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 4.27231467473525, |
|
"grad_norm": 0.6923869252204895, |
|
"learning_rate": 1.083100293324636e-06, |
|
"loss": 0.4721, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 4.276853252647504, |
|
"grad_norm": 0.7291610836982727, |
|
"learning_rate": 1.0778010935406826e-06, |
|
"loss": 0.5147, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 4.281391830559758, |
|
"grad_norm": 0.988376259803772, |
|
"learning_rate": 1.0725113243754009e-06, |
|
"loss": 0.4504, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 4.285930408472012, |
|
"grad_norm": 0.7914155125617981, |
|
"learning_rate": 1.067231020905316e-06, |
|
"loss": 0.4973, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 4.290468986384266, |
|
"grad_norm": 2.622145175933838, |
|
"learning_rate": 1.061960218144185e-06, |
|
"loss": 0.4732, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 4.295007564296521, |
|
"grad_norm": 0.722761332988739, |
|
"learning_rate": 1.0566989510427678e-06, |
|
"loss": 0.4759, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 4.299546142208775, |
|
"grad_norm": 0.7668985724449158, |
|
"learning_rate": 1.051447254488591e-06, |
|
"loss": 0.4969, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 4.304084720121029, |
|
"grad_norm": 0.9016123414039612, |
|
"learning_rate": 1.0462051633057211e-06, |
|
"loss": 0.4827, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 4.308623298033283, |
|
"grad_norm": 0.7125791907310486, |
|
"learning_rate": 1.0409727122545285e-06, |
|
"loss": 0.4691, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 4.3131618759455375, |
|
"grad_norm": 0.7095142006874084, |
|
"learning_rate": 1.0357499360314632e-06, |
|
"loss": 0.4705, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 4.317700453857791, |
|
"grad_norm": 1.2595562934875488, |
|
"learning_rate": 1.0305368692688175e-06, |
|
"loss": 0.4949, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 4.322239031770045, |
|
"grad_norm": 1.0172781944274902, |
|
"learning_rate": 1.0253335465345037e-06, |
|
"loss": 0.4448, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 4.326777609682299, |
|
"grad_norm": 0.8226476907730103, |
|
"learning_rate": 1.0201400023318184e-06, |
|
"loss": 0.5028, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 4.331316187594553, |
|
"grad_norm": 0.9284406304359436, |
|
"learning_rate": 1.0149562710992184e-06, |
|
"loss": 0.4971, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 4.335854765506808, |
|
"grad_norm": 2.741795063018799, |
|
"learning_rate": 1.0097823872100913e-06, |
|
"loss": 0.5247, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 4.340393343419062, |
|
"grad_norm": 0.8518880605697632, |
|
"learning_rate": 1.0046183849725233e-06, |
|
"loss": 0.5039, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 4.344931921331316, |
|
"grad_norm": 0.8044947981834412, |
|
"learning_rate": 9.994642986290797e-07, |
|
"loss": 0.4951, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 4.34947049924357, |
|
"grad_norm": 1.1144733428955078, |
|
"learning_rate": 9.943201623565698e-07, |
|
"loss": 0.4707, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 4.354009077155824, |
|
"grad_norm": 0.8300093412399292, |
|
"learning_rate": 9.891860102658267e-07, |
|
"loss": 0.4889, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 4.358547655068079, |
|
"grad_norm": 0.9095519781112671, |
|
"learning_rate": 9.840618764014756e-07, |
|
"loss": 0.4901, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 4.363086232980333, |
|
"grad_norm": 0.7411296963691711, |
|
"learning_rate": 9.78947794741713e-07, |
|
"loss": 0.4783, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 4.367624810892587, |
|
"grad_norm": 0.9816136360168457, |
|
"learning_rate": 9.73843799198077e-07, |
|
"loss": 0.4937, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 4.372163388804841, |
|
"grad_norm": 0.7506483793258667, |
|
"learning_rate": 9.687499236152262e-07, |
|
"loss": 0.4725, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 4.376701966717095, |
|
"grad_norm": 1.1032840013504028, |
|
"learning_rate": 9.63666201770714e-07, |
|
"loss": 0.4978, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 4.38124054462935, |
|
"grad_norm": 0.6765017509460449, |
|
"learning_rate": 9.585926673747617e-07, |
|
"loss": 0.4702, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 4.385779122541604, |
|
"grad_norm": 0.71392422914505, |
|
"learning_rate": 9.535293540700408e-07, |
|
"loss": 0.4616, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 4.390317700453858, |
|
"grad_norm": 1.6125102043151855, |
|
"learning_rate": 9.48476295431443e-07, |
|
"loss": 0.5081, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 4.394856278366112, |
|
"grad_norm": 0.7444176077842712, |
|
"learning_rate": 9.434335249658649e-07, |
|
"loss": 0.4614, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 4.3993948562783665, |
|
"grad_norm": 0.6820570230484009, |
|
"learning_rate": 9.384010761119788e-07, |
|
"loss": 0.4625, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 4.403933434190621, |
|
"grad_norm": 0.6871091723442078, |
|
"learning_rate": 9.333789822400172e-07, |
|
"loss": 0.5177, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 4.408472012102875, |
|
"grad_norm": 0.7985364198684692, |
|
"learning_rate": 9.283672766515455e-07, |
|
"loss": 0.5195, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 4.413010590015128, |
|
"grad_norm": 0.7615556716918945, |
|
"learning_rate": 9.233659925792476e-07, |
|
"loss": 0.4599, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 4.417549167927382, |
|
"grad_norm": 0.754677414894104, |
|
"learning_rate": 9.183751631866992e-07, |
|
"loss": 0.4978, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 4.422087745839637, |
|
"grad_norm": 0.7244731187820435, |
|
"learning_rate": 9.133948215681524e-07, |
|
"loss": 0.4411, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 4.426626323751891, |
|
"grad_norm": 0.778304398059845, |
|
"learning_rate": 9.084250007483158e-07, |
|
"loss": 0.5019, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 4.431164901664145, |
|
"grad_norm": 1.0169285535812378, |
|
"learning_rate": 9.034657336821312e-07, |
|
"loss": 0.4648, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 4.435703479576399, |
|
"grad_norm": 0.7210962772369385, |
|
"learning_rate": 8.985170532545623e-07, |
|
"loss": 0.508, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 4.440242057488653, |
|
"grad_norm": 0.7974186539649963, |
|
"learning_rate": 8.935789922803689e-07, |
|
"loss": 0.504, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 4.444780635400908, |
|
"grad_norm": 0.6913062930107117, |
|
"learning_rate": 8.886515835038967e-07, |
|
"loss": 0.4903, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 4.449319213313162, |
|
"grad_norm": 0.7884510159492493, |
|
"learning_rate": 8.837348595988526e-07, |
|
"loss": 0.4826, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 4.453857791225416, |
|
"grad_norm": 0.7987708449363708, |
|
"learning_rate": 8.78828853168096e-07, |
|
"loss": 0.4764, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 4.45839636913767, |
|
"grad_norm": 0.8689277172088623, |
|
"learning_rate": 8.739335967434151e-07, |
|
"loss": 0.4681, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 4.462934947049924, |
|
"grad_norm": 0.8613301515579224, |
|
"learning_rate": 8.690491227853177e-07, |
|
"loss": 0.4696, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 4.467473524962179, |
|
"grad_norm": 1.1412184238433838, |
|
"learning_rate": 8.6417546368281e-07, |
|
"loss": 0.5196, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 4.472012102874433, |
|
"grad_norm": 0.7207121253013611, |
|
"learning_rate": 8.593126517531869e-07, |
|
"loss": 0.4838, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 4.476550680786687, |
|
"grad_norm": 3.387355327606201, |
|
"learning_rate": 8.544607192418161e-07, |
|
"loss": 0.4824, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 4.481089258698941, |
|
"grad_norm": 0.9257369637489319, |
|
"learning_rate": 8.496196983219205e-07, |
|
"loss": 0.4825, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 4.4856278366111955, |
|
"grad_norm": 0.8000094294548035, |
|
"learning_rate": 8.447896210943718e-07, |
|
"loss": 0.5123, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 4.49016641452345, |
|
"grad_norm": 0.6996716260910034, |
|
"learning_rate": 8.399705195874708e-07, |
|
"loss": 0.5056, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 4.494704992435704, |
|
"grad_norm": 0.688705563545227, |
|
"learning_rate": 8.351624257567415e-07, |
|
"loss": 0.4915, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 4.499243570347957, |
|
"grad_norm": 1.2094013690948486, |
|
"learning_rate": 8.303653714847118e-07, |
|
"loss": 0.4977, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 4.503782148260212, |
|
"grad_norm": 0.7317572832107544, |
|
"learning_rate": 8.255793885807104e-07, |
|
"loss": 0.4863, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 4.508320726172466, |
|
"grad_norm": 0.7524070739746094, |
|
"learning_rate": 8.208045087806479e-07, |
|
"loss": 0.5072, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 4.51285930408472, |
|
"grad_norm": 0.7206716537475586, |
|
"learning_rate": 8.160407637468134e-07, |
|
"loss": 0.4787, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 4.517397881996974, |
|
"grad_norm": 0.9675849080085754, |
|
"learning_rate": 8.112881850676585e-07, |
|
"loss": 0.5062, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 4.521936459909228, |
|
"grad_norm": 0.6949933767318726, |
|
"learning_rate": 8.065468042575905e-07, |
|
"loss": 0.508, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 4.526475037821482, |
|
"grad_norm": 0.9218463897705078, |
|
"learning_rate": 8.018166527567672e-07, |
|
"loss": 0.486, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 4.531013615733737, |
|
"grad_norm": 0.7000327110290527, |
|
"learning_rate": 7.970977619308806e-07, |
|
"loss": 0.46, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 4.535552193645991, |
|
"grad_norm": 0.740325391292572, |
|
"learning_rate": 7.923901630709554e-07, |
|
"loss": 0.4896, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 4.540090771558245, |
|
"grad_norm": 1.6052324771881104, |
|
"learning_rate": 7.87693887393137e-07, |
|
"loss": 0.4878, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 4.544629349470499, |
|
"grad_norm": 0.7649421095848083, |
|
"learning_rate": 7.830089660384896e-07, |
|
"loss": 0.4924, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 4.549167927382753, |
|
"grad_norm": 0.734867513179779, |
|
"learning_rate": 7.783354300727835e-07, |
|
"loss": 0.4788, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 4.553706505295008, |
|
"grad_norm": 0.6974482536315918, |
|
"learning_rate": 7.736733104862953e-07, |
|
"loss": 0.48, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 4.558245083207262, |
|
"grad_norm": 0.7161325812339783, |
|
"learning_rate": 7.690226381935976e-07, |
|
"loss": 0.5301, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 4.562783661119516, |
|
"grad_norm": 0.676956057548523, |
|
"learning_rate": 7.643834440333553e-07, |
|
"loss": 0.4803, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 4.56732223903177, |
|
"grad_norm": 0.7367483973503113, |
|
"learning_rate": 7.597557587681242e-07, |
|
"loss": 0.4826, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 4.5718608169440245, |
|
"grad_norm": 0.7037650942802429, |
|
"learning_rate": 7.551396130841406e-07, |
|
"loss": 0.4926, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 4.576399394856279, |
|
"grad_norm": 0.6749675869941711, |
|
"learning_rate": 7.505350375911278e-07, |
|
"loss": 0.5091, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 4.580937972768533, |
|
"grad_norm": 0.6664526462554932, |
|
"learning_rate": 7.459420628220801e-07, |
|
"loss": 0.4723, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 4.585476550680786, |
|
"grad_norm": 0.7849709391593933, |
|
"learning_rate": 7.413607192330724e-07, |
|
"loss": 0.5035, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 4.590015128593041, |
|
"grad_norm": 0.881906270980835, |
|
"learning_rate": 7.367910372030495e-07, |
|
"loss": 0.4922, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 4.594553706505295, |
|
"grad_norm": 1.0520840883255005, |
|
"learning_rate": 7.322330470336314e-07, |
|
"loss": 0.4872, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 4.599092284417549, |
|
"grad_norm": 0.7468703985214233, |
|
"learning_rate": 7.27686778948907e-07, |
|
"loss": 0.4962, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 4.603630862329803, |
|
"grad_norm": 1.159132480621338, |
|
"learning_rate": 7.231522630952359e-07, |
|
"loss": 0.4881, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 4.608169440242057, |
|
"grad_norm": 0.7285956144332886, |
|
"learning_rate": 7.186295295410506e-07, |
|
"loss": 0.5156, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 4.612708018154311, |
|
"grad_norm": 0.7299140095710754, |
|
"learning_rate": 7.141186082766521e-07, |
|
"loss": 0.5128, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 4.617246596066566, |
|
"grad_norm": 0.8349388837814331, |
|
"learning_rate": 7.096195292140173e-07, |
|
"loss": 0.4753, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 4.62178517397882, |
|
"grad_norm": 0.7903823256492615, |
|
"learning_rate": 7.051323221865933e-07, |
|
"loss": 0.4785, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 4.626323751891074, |
|
"grad_norm": 0.9033539295196533, |
|
"learning_rate": 7.006570169491084e-07, |
|
"loss": 0.517, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 4.630862329803328, |
|
"grad_norm": 0.7931293845176697, |
|
"learning_rate": 6.961936431773655e-07, |
|
"loss": 0.4976, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 4.635400907715582, |
|
"grad_norm": 2.131030559539795, |
|
"learning_rate": 6.917422304680532e-07, |
|
"loss": 0.4781, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 4.639939485627837, |
|
"grad_norm": 0.881932258605957, |
|
"learning_rate": 6.873028083385436e-07, |
|
"loss": 0.4841, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 4.644478063540091, |
|
"grad_norm": 0.7999169230461121, |
|
"learning_rate": 6.828754062266996e-07, |
|
"loss": 0.5095, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 4.649016641452345, |
|
"grad_norm": 0.7111617922782898, |
|
"learning_rate": 6.784600534906813e-07, |
|
"loss": 0.4742, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 4.653555219364599, |
|
"grad_norm": 0.7100422978401184, |
|
"learning_rate": 6.740567794087463e-07, |
|
"loss": 0.5097, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 4.6580937972768535, |
|
"grad_norm": 0.7377627491950989, |
|
"learning_rate": 6.696656131790611e-07, |
|
"loss": 0.515, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 4.662632375189108, |
|
"grad_norm": 0.7187225222587585, |
|
"learning_rate": 6.652865839195025e-07, |
|
"loss": 0.4902, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 4.667170953101362, |
|
"grad_norm": 0.7497810125350952, |
|
"learning_rate": 6.609197206674694e-07, |
|
"loss": 0.4835, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 4.671709531013616, |
|
"grad_norm": 0.7579675912857056, |
|
"learning_rate": 6.565650523796843e-07, |
|
"loss": 0.4518, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 4.67624810892587, |
|
"grad_norm": 0.7889072299003601, |
|
"learning_rate": 6.522226079320099e-07, |
|
"loss": 0.4745, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 4.680786686838124, |
|
"grad_norm": 0.8384209275245667, |
|
"learning_rate": 6.478924161192479e-07, |
|
"loss": 0.4639, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 4.685325264750379, |
|
"grad_norm": 0.876602292060852, |
|
"learning_rate": 6.435745056549533e-07, |
|
"loss": 0.5324, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 4.689863842662632, |
|
"grad_norm": 1.0869137048721313, |
|
"learning_rate": 6.392689051712458e-07, |
|
"loss": 0.4907, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 4.694402420574886, |
|
"grad_norm": 0.7678726315498352, |
|
"learning_rate": 6.349756432186143e-07, |
|
"loss": 0.5281, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 4.69894099848714, |
|
"grad_norm": 0.7951900959014893, |
|
"learning_rate": 6.306947482657341e-07, |
|
"loss": 0.4939, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 4.703479576399395, |
|
"grad_norm": 0.7335869073867798, |
|
"learning_rate": 6.264262486992712e-07, |
|
"loss": 0.441, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 4.708018154311649, |
|
"grad_norm": 0.6744561195373535, |
|
"learning_rate": 6.221701728237008e-07, |
|
"loss": 0.4914, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 4.712556732223903, |
|
"grad_norm": 0.8650886416435242, |
|
"learning_rate": 6.179265488611138e-07, |
|
"loss": 0.5063, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 4.717095310136157, |
|
"grad_norm": 0.7943755984306335, |
|
"learning_rate": 6.136954049510351e-07, |
|
"loss": 0.4598, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 4.721633888048411, |
|
"grad_norm": 0.870184063911438, |
|
"learning_rate": 6.09476769150231e-07, |
|
"loss": 0.5063, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 4.726172465960666, |
|
"grad_norm": 0.7460466027259827, |
|
"learning_rate": 6.052706694325289e-07, |
|
"loss": 0.5014, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 4.73071104387292, |
|
"grad_norm": 1.1483376026153564, |
|
"learning_rate": 6.010771336886292e-07, |
|
"loss": 0.5028, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 4.735249621785174, |
|
"grad_norm": 0.7843842506408691, |
|
"learning_rate": 5.968961897259185e-07, |
|
"loss": 0.4962, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 4.739788199697428, |
|
"grad_norm": 0.7222856283187866, |
|
"learning_rate": 5.927278652682891e-07, |
|
"loss": 0.4836, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 4.7443267776096825, |
|
"grad_norm": 0.969645619392395, |
|
"learning_rate": 5.885721879559514e-07, |
|
"loss": 0.4867, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 4.748865355521937, |
|
"grad_norm": 1.05107581615448, |
|
"learning_rate": 5.844291853452549e-07, |
|
"loss": 0.4977, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 4.753403933434191, |
|
"grad_norm": 0.6660315990447998, |
|
"learning_rate": 5.802988849085001e-07, |
|
"loss": 0.5086, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 4.757942511346445, |
|
"grad_norm": 1.050270438194275, |
|
"learning_rate": 5.761813140337619e-07, |
|
"loss": 0.4938, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 4.762481089258699, |
|
"grad_norm": 1.5800691843032837, |
|
"learning_rate": 5.720765000247027e-07, |
|
"loss": 0.5146, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 4.7670196671709535, |
|
"grad_norm": 0.6957308650016785, |
|
"learning_rate": 5.67984470100397e-07, |
|
"loss": 0.4802, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 4.771558245083208, |
|
"grad_norm": 13.844222068786621, |
|
"learning_rate": 5.639052513951449e-07, |
|
"loss": 0.5171, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 4.776096822995461, |
|
"grad_norm": 1.097083330154419, |
|
"learning_rate": 5.598388709582963e-07, |
|
"loss": 0.4915, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 4.780635400907715, |
|
"grad_norm": 0.8402828574180603, |
|
"learning_rate": 5.557853557540724e-07, |
|
"loss": 0.4767, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 4.785173978819969, |
|
"grad_norm": 1.0703009366989136, |
|
"learning_rate": 5.517447326613809e-07, |
|
"loss": 0.4577, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 4.789712556732224, |
|
"grad_norm": 0.824536919593811, |
|
"learning_rate": 5.477170284736455e-07, |
|
"loss": 0.4635, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 4.794251134644478, |
|
"grad_norm": 0.7436249256134033, |
|
"learning_rate": 5.437022698986217e-07, |
|
"loss": 0.4882, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 4.798789712556732, |
|
"grad_norm": 0.7780619859695435, |
|
"learning_rate": 5.397004835582242e-07, |
|
"loss": 0.4823, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 4.803328290468986, |
|
"grad_norm": 0.9229158163070679, |
|
"learning_rate": 5.357116959883477e-07, |
|
"loss": 0.4963, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 4.80786686838124, |
|
"grad_norm": 0.7092882990837097, |
|
"learning_rate": 5.317359336386932e-07, |
|
"loss": 0.5061, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 4.812405446293495, |
|
"grad_norm": 0.8238071799278259, |
|
"learning_rate": 5.27773222872589e-07, |
|
"loss": 0.4716, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 4.816944024205749, |
|
"grad_norm": 0.9120982885360718, |
|
"learning_rate": 5.238235899668212e-07, |
|
"loss": 0.4613, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 4.821482602118003, |
|
"grad_norm": 0.8099984526634216, |
|
"learning_rate": 5.198870611114529e-07, |
|
"loss": 0.4949, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 4.826021180030257, |
|
"grad_norm": 0.885421097278595, |
|
"learning_rate": 5.159636624096572e-07, |
|
"loss": 0.4697, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 4.8305597579425115, |
|
"grad_norm": 0.8806759715080261, |
|
"learning_rate": 5.120534198775404e-07, |
|
"loss": 0.5057, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 4.835098335854766, |
|
"grad_norm": 1.269351840019226, |
|
"learning_rate": 5.081563594439676e-07, |
|
"loss": 0.5088, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 4.83963691376702, |
|
"grad_norm": 0.7071214318275452, |
|
"learning_rate": 5.04272506950397e-07, |
|
"loss": 0.4803, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 4.844175491679274, |
|
"grad_norm": 0.762505054473877, |
|
"learning_rate": 5.004018881507016e-07, |
|
"loss": 0.4945, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 4.848714069591528, |
|
"grad_norm": 0.792746365070343, |
|
"learning_rate": 4.965445287110038e-07, |
|
"loss": 0.5061, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 4.8532526475037825, |
|
"grad_norm": 0.7810184955596924, |
|
"learning_rate": 4.92700454209501e-07, |
|
"loss": 0.4824, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 4.857791225416037, |
|
"grad_norm": 0.7621875405311584, |
|
"learning_rate": 4.888696901363005e-07, |
|
"loss": 0.4603, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 4.86232980332829, |
|
"grad_norm": 1.3374532461166382, |
|
"learning_rate": 4.850522618932449e-07, |
|
"loss": 0.5073, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 4.866868381240545, |
|
"grad_norm": 0.7166194915771484, |
|
"learning_rate": 4.812481947937498e-07, |
|
"loss": 0.484, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 4.871406959152798, |
|
"grad_norm": 1.4800307750701904, |
|
"learning_rate": 4.774575140626317e-07, |
|
"loss": 0.4728, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 4.875945537065053, |
|
"grad_norm": 0.7257369160652161, |
|
"learning_rate": 4.736802448359404e-07, |
|
"loss": 0.4827, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 4.880484114977307, |
|
"grad_norm": 0.6776279211044312, |
|
"learning_rate": 4.6991641216079684e-07, |
|
"loss": 0.5066, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 4.885022692889561, |
|
"grad_norm": 0.7334598898887634, |
|
"learning_rate": 4.6616604099522097e-07, |
|
"loss": 0.4857, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 4.889561270801815, |
|
"grad_norm": 0.6790256500244141, |
|
"learning_rate": 4.624291562079719e-07, |
|
"loss": 0.4921, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 4.8940998487140694, |
|
"grad_norm": 0.7464075088500977, |
|
"learning_rate": 4.5870578257837803e-07, |
|
"loss": 0.4441, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 4.898638426626324, |
|
"grad_norm": 0.7377055287361145, |
|
"learning_rate": 4.549959447961777e-07, |
|
"loss": 0.4607, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 4.903177004538578, |
|
"grad_norm": 0.8656676411628723, |
|
"learning_rate": 4.5129966746134987e-07, |
|
"loss": 0.4524, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 4.907715582450832, |
|
"grad_norm": 0.9937976002693176, |
|
"learning_rate": 4.476169750839571e-07, |
|
"loss": 0.4831, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 4.912254160363086, |
|
"grad_norm": 0.8174405097961426, |
|
"learning_rate": 4.439478920839771e-07, |
|
"loss": 0.4755, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 4.9167927382753405, |
|
"grad_norm": 0.7877658009529114, |
|
"learning_rate": 4.402924427911459e-07, |
|
"loss": 0.5249, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 4.921331316187595, |
|
"grad_norm": 0.7509103417396545, |
|
"learning_rate": 4.366506514447932e-07, |
|
"loss": 0.5099, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 4.925869894099849, |
|
"grad_norm": 0.7400006651878357, |
|
"learning_rate": 4.3302254219368235e-07, |
|
"loss": 0.4449, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 4.930408472012103, |
|
"grad_norm": 0.7745682001113892, |
|
"learning_rate": 4.294081390958521e-07, |
|
"loss": 0.4939, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 4.934947049924357, |
|
"grad_norm": 0.7397611141204834, |
|
"learning_rate": 4.2580746611845273e-07, |
|
"loss": 0.5219, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 4.9394856278366115, |
|
"grad_norm": 0.8154904842376709, |
|
"learning_rate": 4.2222054713759315e-07, |
|
"loss": 0.4853, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 4.944024205748866, |
|
"grad_norm": 0.8152079582214355, |
|
"learning_rate": 4.186474059381768e-07, |
|
"loss": 0.5134, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 4.94856278366112, |
|
"grad_norm": 0.776576578617096, |
|
"learning_rate": 4.1508806621374825e-07, |
|
"loss": 0.4983, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 4.953101361573374, |
|
"grad_norm": 0.729992151260376, |
|
"learning_rate": 4.1154255156633267e-07, |
|
"loss": 0.4978, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 4.957639939485627, |
|
"grad_norm": 0.7563814520835876, |
|
"learning_rate": 4.0801088550628307e-07, |
|
"loss": 0.4817, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 4.962178517397882, |
|
"grad_norm": 1.2037073373794556, |
|
"learning_rate": 4.044930914521203e-07, |
|
"loss": 0.455, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 4.966717095310136, |
|
"grad_norm": 0.7728638648986816, |
|
"learning_rate": 4.0098919273037965e-07, |
|
"loss": 0.4898, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 4.97125567322239, |
|
"grad_norm": 0.7198495268821716, |
|
"learning_rate": 3.9749921257545926e-07, |
|
"loss": 0.5035, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 4.975794251134644, |
|
"grad_norm": 0.864120602607727, |
|
"learning_rate": 3.940231741294598e-07, |
|
"loss": 0.506, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 4.9803328290468984, |
|
"grad_norm": 0.7351332306861877, |
|
"learning_rate": 3.9056110044203594e-07, |
|
"loss": 0.5028, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 4.984871406959153, |
|
"grad_norm": 0.7667080163955688, |
|
"learning_rate": 3.8711301447024093e-07, |
|
"loss": 0.4922, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 4.989409984871407, |
|
"grad_norm": 0.8356844186782837, |
|
"learning_rate": 3.8367893907837683e-07, |
|
"loss": 0.5058, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 4.993948562783661, |
|
"grad_norm": 0.7784990072250366, |
|
"learning_rate": 3.802588970378387e-07, |
|
"loss": 0.4222, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 4.998487140695915, |
|
"grad_norm": 0.7764464616775513, |
|
"learning_rate": 3.7685291102696976e-07, |
|
"loss": 0.4688, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 0.7764464616775513, |
|
"learning_rate": 3.734610036309047e-07, |
|
"loss": 0.1767, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 5.004538577912254, |
|
"grad_norm": 1.046483039855957, |
|
"learning_rate": 3.7008319734142346e-07, |
|
"loss": 0.4978, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 5.009077155824508, |
|
"grad_norm": 0.7094864845275879, |
|
"learning_rate": 3.6671951455680254e-07, |
|
"loss": 0.45, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 5.013615733736763, |
|
"grad_norm": 0.8907707333564758, |
|
"learning_rate": 3.6336997758166263e-07, |
|
"loss": 0.4951, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 5.018154311649017, |
|
"grad_norm": 0.7162082195281982, |
|
"learning_rate": 3.6003460862682774e-07, |
|
"loss": 0.4909, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 5.022692889561271, |
|
"grad_norm": 0.9485985636711121, |
|
"learning_rate": 3.56713429809169e-07, |
|
"loss": 0.4624, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 5.027231467473525, |
|
"grad_norm": 0.7002444863319397, |
|
"learning_rate": 3.534064631514658e-07, |
|
"loss": 0.4572, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 5.031770045385779, |
|
"grad_norm": 0.6732192635536194, |
|
"learning_rate": 3.5011373058225413e-07, |
|
"loss": 0.4476, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 5.036308623298034, |
|
"grad_norm": 0.7230054140090942, |
|
"learning_rate": 3.468352539356856e-07, |
|
"loss": 0.4928, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 5.040847201210288, |
|
"grad_norm": 0.8661540746688843, |
|
"learning_rate": 3.435710549513793e-07, |
|
"loss": 0.4815, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 5.045385779122542, |
|
"grad_norm": 0.7192421555519104, |
|
"learning_rate": 3.403211552742788e-07, |
|
"loss": 0.4345, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 5.049924357034795, |
|
"grad_norm": 0.775611162185669, |
|
"learning_rate": 3.370855764545106e-07, |
|
"loss": 0.4514, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 5.05446293494705, |
|
"grad_norm": 0.666876494884491, |
|
"learning_rate": 3.3386433994723717e-07, |
|
"loss": 0.4622, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 5.059001512859304, |
|
"grad_norm": 0.9268916249275208, |
|
"learning_rate": 3.306574671125185e-07, |
|
"loss": 0.4624, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 5.063540090771558, |
|
"grad_norm": 1.5524824857711792, |
|
"learning_rate": 3.274649792151671e-07, |
|
"loss": 0.4963, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 5.068078668683812, |
|
"grad_norm": 0.9165545701980591, |
|
"learning_rate": 3.2428689742461187e-07, |
|
"loss": 0.4685, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 5.072617246596066, |
|
"grad_norm": 0.8844300508499146, |
|
"learning_rate": 3.2112324281475174e-07, |
|
"loss": 0.4791, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 5.077155824508321, |
|
"grad_norm": 0.6754626631736755, |
|
"learning_rate": 3.1797403636382035e-07, |
|
"loss": 0.4844, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 5.081694402420575, |
|
"grad_norm": 0.9919183850288391, |
|
"learning_rate": 3.1483929895424453e-07, |
|
"loss": 0.4755, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 5.086232980332829, |
|
"grad_norm": 0.7306275963783264, |
|
"learning_rate": 3.117190513725066e-07, |
|
"loss": 0.4958, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 5.090771558245083, |
|
"grad_norm": 0.7538716793060303, |
|
"learning_rate": 3.086133143090081e-07, |
|
"loss": 0.4927, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 5.095310136157337, |
|
"grad_norm": 0.851882815361023, |
|
"learning_rate": 3.055221083579296e-07, |
|
"loss": 0.4807, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 5.099848714069592, |
|
"grad_norm": 0.7952591776847839, |
|
"learning_rate": 3.024454540170968e-07, |
|
"loss": 0.4903, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 5.104387291981846, |
|
"grad_norm": 0.7629356384277344, |
|
"learning_rate": 2.993833716878422e-07, |
|
"loss": 0.4954, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 5.1089258698941, |
|
"grad_norm": 0.9297985434532166, |
|
"learning_rate": 2.9633588167487266e-07, |
|
"loss": 0.513, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 5.113464447806354, |
|
"grad_norm": 0.6958643198013306, |
|
"learning_rate": 2.933030041861312e-07, |
|
"loss": 0.4906, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 5.118003025718608, |
|
"grad_norm": 0.6689577698707581, |
|
"learning_rate": 2.9028475933266825e-07, |
|
"loss": 0.429, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 5.122541603630863, |
|
"grad_norm": 0.8975169658660889, |
|
"learning_rate": 2.8728116712850195e-07, |
|
"loss": 0.464, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 5.127080181543117, |
|
"grad_norm": 0.8286609649658203, |
|
"learning_rate": 2.8429224749048884e-07, |
|
"loss": 0.4497, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 5.131618759455371, |
|
"grad_norm": 0.8133575320243835, |
|
"learning_rate": 2.8131802023819376e-07, |
|
"loss": 0.4943, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 5.136157337367624, |
|
"grad_norm": 0.7451333403587341, |
|
"learning_rate": 2.783585050937537e-07, |
|
"loss": 0.4699, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 5.140695915279879, |
|
"grad_norm": 0.9283474087715149, |
|
"learning_rate": 2.7541372168175093e-07, |
|
"loss": 0.4896, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 5.145234493192133, |
|
"grad_norm": 0.7920743823051453, |
|
"learning_rate": 2.7248368952908055e-07, |
|
"loss": 0.4999, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 5.149773071104387, |
|
"grad_norm": 0.7597651481628418, |
|
"learning_rate": 2.695684280648228e-07, |
|
"loss": 0.486, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 5.154311649016641, |
|
"grad_norm": 0.7768572568893433, |
|
"learning_rate": 2.666679566201119e-07, |
|
"loss": 0.48, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 5.158850226928895, |
|
"grad_norm": 0.9812193512916565, |
|
"learning_rate": 2.6378229442801163e-07, |
|
"loss": 0.4836, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 5.16338880484115, |
|
"grad_norm": 0.7561526894569397, |
|
"learning_rate": 2.6091146062338176e-07, |
|
"loss": 0.4618, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 5.167927382753404, |
|
"grad_norm": 0.711685299873352, |
|
"learning_rate": 2.580554742427591e-07, |
|
"loss": 0.4736, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 5.172465960665658, |
|
"grad_norm": 0.712313711643219, |
|
"learning_rate": 2.5521435422422443e-07, |
|
"loss": 0.4547, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 5.177004538577912, |
|
"grad_norm": 0.839168131351471, |
|
"learning_rate": 2.523881194072791e-07, |
|
"loss": 0.5002, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 5.181543116490166, |
|
"grad_norm": 0.8970273733139038, |
|
"learning_rate": 2.4957678853272246e-07, |
|
"loss": 0.5014, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 5.186081694402421, |
|
"grad_norm": 0.6730303764343262, |
|
"learning_rate": 2.4678038024252363e-07, |
|
"loss": 0.4691, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 5.190620272314675, |
|
"grad_norm": 0.915203869342804, |
|
"learning_rate": 2.4399891307970187e-07, |
|
"loss": 0.4886, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 5.195158850226929, |
|
"grad_norm": 0.7596525549888611, |
|
"learning_rate": 2.4123240548819955e-07, |
|
"loss": 0.474, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 5.199697428139183, |
|
"grad_norm": 1.1874265670776367, |
|
"learning_rate": 2.384808758127638e-07, |
|
"loss": 0.4834, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 5.204236006051437, |
|
"grad_norm": 0.6951465606689453, |
|
"learning_rate": 2.357443422988215e-07, |
|
"loss": 0.4736, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 5.208774583963692, |
|
"grad_norm": 0.9026170969009399, |
|
"learning_rate": 2.330228230923612e-07, |
|
"loss": 0.4752, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 5.213313161875946, |
|
"grad_norm": 0.9337056875228882, |
|
"learning_rate": 2.3031633623980975e-07, |
|
"loss": 0.4745, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 5.2178517397882, |
|
"grad_norm": 0.6569899916648865, |
|
"learning_rate": 2.2762489968791601e-07, |
|
"loss": 0.4856, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 5.222390317700454, |
|
"grad_norm": 0.7478249669075012, |
|
"learning_rate": 2.249485312836294e-07, |
|
"loss": 0.5025, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 5.2269288956127085, |
|
"grad_norm": 0.7351001501083374, |
|
"learning_rate": 2.2228724877398134e-07, |
|
"loss": 0.4784, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 5.231467473524962, |
|
"grad_norm": 0.7316746115684509, |
|
"learning_rate": 2.1964106980597034e-07, |
|
"loss": 0.4769, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 5.236006051437216, |
|
"grad_norm": 1.1231337785720825, |
|
"learning_rate": 2.1701001192644067e-07, |
|
"loss": 0.4658, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 5.24054462934947, |
|
"grad_norm": 0.9594347476959229, |
|
"learning_rate": 2.1439409258197153e-07, |
|
"loss": 0.4819, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 5.245083207261724, |
|
"grad_norm": 0.7892640233039856, |
|
"learning_rate": 2.117933291187557e-07, |
|
"loss": 0.4498, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 5.249621785173979, |
|
"grad_norm": 0.8971346616744995, |
|
"learning_rate": 2.092077387824884e-07, |
|
"loss": 0.4792, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 5.254160363086233, |
|
"grad_norm": 0.7570569515228271, |
|
"learning_rate": 2.0663733871825105e-07, |
|
"loss": 0.4871, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 5.258698940998487, |
|
"grad_norm": 0.8948609828948975, |
|
"learning_rate": 2.0408214597039928e-07, |
|
"loss": 0.4615, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 5.263237518910741, |
|
"grad_norm": 0.716673731803894, |
|
"learning_rate": 2.015421774824472e-07, |
|
"loss": 0.4846, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 5.267776096822995, |
|
"grad_norm": 0.9955589175224304, |
|
"learning_rate": 1.9901745009695772e-07, |
|
"loss": 0.4749, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 5.27231467473525, |
|
"grad_norm": 0.7813113927841187, |
|
"learning_rate": 1.9650798055543014e-07, |
|
"loss": 0.4854, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 5.276853252647504, |
|
"grad_norm": 0.7000608444213867, |
|
"learning_rate": 1.9401378549818745e-07, |
|
"loss": 0.4826, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 5.281391830559758, |
|
"grad_norm": 0.7905756831169128, |
|
"learning_rate": 1.9153488146426897e-07, |
|
"loss": 0.4856, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 5.285930408472012, |
|
"grad_norm": 0.76811683177948, |
|
"learning_rate": 1.8907128489131732e-07, |
|
"loss": 0.5032, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 5.290468986384266, |
|
"grad_norm": 0.7297028303146362, |
|
"learning_rate": 1.8662301211547302e-07, |
|
"loss": 0.493, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 5.295007564296521, |
|
"grad_norm": 0.7701004147529602, |
|
"learning_rate": 1.8419007937126254e-07, |
|
"loss": 0.4593, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 5.299546142208775, |
|
"grad_norm": 0.7971721887588501, |
|
"learning_rate": 1.8177250279149405e-07, |
|
"loss": 0.4894, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 5.304084720121029, |
|
"grad_norm": 0.7243720293045044, |
|
"learning_rate": 1.7937029840714716e-07, |
|
"loss": 0.4852, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 5.308623298033283, |
|
"grad_norm": 0.8683739304542542, |
|
"learning_rate": 1.7698348214726935e-07, |
|
"loss": 0.4563, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 5.3131618759455375, |
|
"grad_norm": 0.7620447278022766, |
|
"learning_rate": 1.7461206983886952e-07, |
|
"loss": 0.4955, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 5.317700453857791, |
|
"grad_norm": 0.7649240493774414, |
|
"learning_rate": 1.7225607720681132e-07, |
|
"loss": 0.4707, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 5.322239031770045, |
|
"grad_norm": 0.9427205920219421, |
|
"learning_rate": 1.6991551987371208e-07, |
|
"loss": 0.4908, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 5.326777609682299, |
|
"grad_norm": 0.7606921195983887, |
|
"learning_rate": 1.6759041335983606e-07, |
|
"loss": 0.4538, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 5.331316187594553, |
|
"grad_norm": 0.7385324239730835, |
|
"learning_rate": 1.6528077308299388e-07, |
|
"loss": 0.4719, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 5.335854765506808, |
|
"grad_norm": 0.8069400787353516, |
|
"learning_rate": 1.6298661435843883e-07, |
|
"loss": 0.4702, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 5.340393343419062, |
|
"grad_norm": 0.8330691456794739, |
|
"learning_rate": 1.607079523987662e-07, |
|
"loss": 0.4583, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 5.344931921331316, |
|
"grad_norm": 0.682507336139679, |
|
"learning_rate": 1.5844480231381132e-07, |
|
"loss": 0.4721, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 5.34947049924357, |
|
"grad_norm": 0.790880560874939, |
|
"learning_rate": 1.5619717911055127e-07, |
|
"loss": 0.4693, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 5.354009077155824, |
|
"grad_norm": 0.8017518520355225, |
|
"learning_rate": 1.5396509769300294e-07, |
|
"loss": 0.4869, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 5.358547655068079, |
|
"grad_norm": 0.7758563160896301, |
|
"learning_rate": 1.5174857286212625e-07, |
|
"loss": 0.4819, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 5.363086232980333, |
|
"grad_norm": 1.1839865446090698, |
|
"learning_rate": 1.4954761931572526e-07, |
|
"loss": 0.5027, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 5.367624810892587, |
|
"grad_norm": 1.2727587223052979, |
|
"learning_rate": 1.4736225164835e-07, |
|
"loss": 0.4786, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 5.372163388804841, |
|
"grad_norm": 0.7882137298583984, |
|
"learning_rate": 1.4519248435120093e-07, |
|
"loss": 0.4763, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 5.376701966717095, |
|
"grad_norm": 0.7846785187721252, |
|
"learning_rate": 1.430383318120318e-07, |
|
"loss": 0.4685, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 5.38124054462935, |
|
"grad_norm": 0.7354263067245483, |
|
"learning_rate": 1.4089980831505535e-07, |
|
"loss": 0.4718, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 5.385779122541604, |
|
"grad_norm": 0.7932591438293457, |
|
"learning_rate": 1.3877692804084687e-07, |
|
"loss": 0.4555, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 5.390317700453858, |
|
"grad_norm": 0.7780957221984863, |
|
"learning_rate": 1.366697050662527e-07, |
|
"loss": 0.5104, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 5.394856278366112, |
|
"grad_norm": 0.6624619960784912, |
|
"learning_rate": 1.345781533642937e-07, |
|
"loss": 0.5044, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 5.3993948562783665, |
|
"grad_norm": 0.736815869808197, |
|
"learning_rate": 1.325022868040765e-07, |
|
"loss": 0.4912, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 5.403933434190621, |
|
"grad_norm": 1.2100348472595215, |
|
"learning_rate": 1.3044211915069715e-07, |
|
"loss": 0.506, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 5.408472012102875, |
|
"grad_norm": 0.7347081303596497, |
|
"learning_rate": 1.2839766406515296e-07, |
|
"loss": 0.4628, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 5.413010590015128, |
|
"grad_norm": 0.8266773819923401, |
|
"learning_rate": 1.2636893510425185e-07, |
|
"loss": 0.4952, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 5.417549167927382, |
|
"grad_norm": 0.7447771430015564, |
|
"learning_rate": 1.2435594572052012e-07, |
|
"loss": 0.4908, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 5.422087745839637, |
|
"grad_norm": 0.8191699385643005, |
|
"learning_rate": 1.223587092621162e-07, |
|
"loss": 0.4815, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 5.426626323751891, |
|
"grad_norm": 0.7668866515159607, |
|
"learning_rate": 1.2037723897273895e-07, |
|
"loss": 0.4883, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 5.431164901664145, |
|
"grad_norm": 1.9579761028289795, |
|
"learning_rate": 1.1841154799154376e-07, |
|
"loss": 0.4937, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 5.435703479576399, |
|
"grad_norm": 0.7861865758895874, |
|
"learning_rate": 1.16461649353051e-07, |
|
"loss": 0.4899, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 5.440242057488653, |
|
"grad_norm": 0.7184910774230957, |
|
"learning_rate": 1.1452755598706317e-07, |
|
"loss": 0.4833, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 5.444780635400908, |
|
"grad_norm": 0.771675169467926, |
|
"learning_rate": 1.1260928071857746e-07, |
|
"loss": 0.4449, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 5.449319213313162, |
|
"grad_norm": 0.8247400522232056, |
|
"learning_rate": 1.1070683626770163e-07, |
|
"loss": 0.4723, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 5.453857791225416, |
|
"grad_norm": 0.6917076706886292, |
|
"learning_rate": 1.0882023524956764e-07, |
|
"loss": 0.4666, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 5.45839636913767, |
|
"grad_norm": 0.7408576607704163, |
|
"learning_rate": 1.0694949017425043e-07, |
|
"loss": 0.4573, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 5.462934947049924, |
|
"grad_norm": 1.0706077814102173, |
|
"learning_rate": 1.0509461344668486e-07, |
|
"loss": 0.4985, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 5.467473524962179, |
|
"grad_norm": 0.8229323029518127, |
|
"learning_rate": 1.0325561736658052e-07, |
|
"loss": 0.4774, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 5.472012102874433, |
|
"grad_norm": 1.0152099132537842, |
|
"learning_rate": 1.0143251412834426e-07, |
|
"loss": 0.5089, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 5.476550680786687, |
|
"grad_norm": 0.7047984600067139, |
|
"learning_rate": 9.96253158209956e-08, |
|
"loss": 0.4803, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 5.481089258698941, |
|
"grad_norm": 0.932101309299469, |
|
"learning_rate": 9.783403442808953e-08, |
|
"loss": 0.4897, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 5.4856278366111955, |
|
"grad_norm": 0.7570725679397583, |
|
"learning_rate": 9.60586818276349e-08, |
|
"loss": 0.4971, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 5.49016641452345, |
|
"grad_norm": 0.7186459302902222, |
|
"learning_rate": 9.429926979201753e-08, |
|
"loss": 0.4853, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 5.494704992435704, |
|
"grad_norm": 0.8389477133750916, |
|
"learning_rate": 9.255580998792008e-08, |
|
"loss": 0.4735, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 5.499243570347957, |
|
"grad_norm": 0.7484689950942993, |
|
"learning_rate": 9.082831397624586e-08, |
|
"loss": 0.4973, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 5.503782148260212, |
|
"grad_norm": 0.8444059491157532, |
|
"learning_rate": 8.911679321204347e-08, |
|
"loss": 0.4565, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 5.508320726172466, |
|
"grad_norm": 0.6689127683639526, |
|
"learning_rate": 8.742125904442705e-08, |
|
"loss": 0.4854, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 5.51285930408472, |
|
"grad_norm": 0.7082827687263489, |
|
"learning_rate": 8.574172271650639e-08, |
|
"loss": 0.4535, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 5.517397881996974, |
|
"grad_norm": 0.8333298563957214, |
|
"learning_rate": 8.407819536530665e-08, |
|
"loss": 0.459, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 5.521936459909228, |
|
"grad_norm": 0.7178226709365845, |
|
"learning_rate": 8.243068802169906e-08, |
|
"loss": 0.4836, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 5.526475037821482, |
|
"grad_norm": 0.8268416523933411, |
|
"learning_rate": 8.079921161032511e-08, |
|
"loss": 0.5056, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 5.531013615733737, |
|
"grad_norm": 0.6459931135177612, |
|
"learning_rate": 7.918377694952518e-08, |
|
"loss": 0.4748, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 5.535552193645991, |
|
"grad_norm": 0.7851235270500183, |
|
"learning_rate": 7.758439475126666e-08, |
|
"loss": 0.4837, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 5.540090771558245, |
|
"grad_norm": 0.8119810223579407, |
|
"learning_rate": 7.600107562107246e-08, |
|
"loss": 0.4597, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 5.544629349470499, |
|
"grad_norm": 0.7369126081466675, |
|
"learning_rate": 7.443383005795224e-08, |
|
"loss": 0.4659, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 5.549167927382753, |
|
"grad_norm": 0.7626518607139587, |
|
"learning_rate": 7.288266845433017e-08, |
|
"loss": 0.4847, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 5.553706505295008, |
|
"grad_norm": 1.6847354173660278, |
|
"learning_rate": 7.134760109597877e-08, |
|
"loss": 0.483, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 5.558245083207262, |
|
"grad_norm": 0.9522053003311157, |
|
"learning_rate": 6.982863816194785e-08, |
|
"loss": 0.4813, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 5.562783661119516, |
|
"grad_norm": 0.7725521922111511, |
|
"learning_rate": 6.832578972450099e-08, |
|
"loss": 0.4923, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 5.56732223903177, |
|
"grad_norm": 0.7546195387840271, |
|
"learning_rate": 6.683906574904364e-08, |
|
"loss": 0.4927, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 5.5718608169440245, |
|
"grad_norm": 0.928009033203125, |
|
"learning_rate": 6.536847609406178e-08, |
|
"loss": 0.4875, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 5.576399394856279, |
|
"grad_norm": 0.75600665807724, |
|
"learning_rate": 6.39140305110536e-08, |
|
"loss": 0.4873, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 5.580937972768533, |
|
"grad_norm": 0.9023406505584717, |
|
"learning_rate": 6.247573864446522e-08, |
|
"loss": 0.4901, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 5.585476550680786, |
|
"grad_norm": 0.751864492893219, |
|
"learning_rate": 6.105361003162891e-08, |
|
"loss": 0.4785, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 5.590015128593041, |
|
"grad_norm": 0.7082744836807251, |
|
"learning_rate": 5.964765410269635e-08, |
|
"loss": 0.4884, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 5.594553706505295, |
|
"grad_norm": 0.6925091743469238, |
|
"learning_rate": 5.825788018057971e-08, |
|
"loss": 0.4762, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 5.599092284417549, |
|
"grad_norm": 0.7400352358818054, |
|
"learning_rate": 5.688429748088614e-08, |
|
"loss": 0.469, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 5.603630862329803, |
|
"grad_norm": 0.7220975756645203, |
|
"learning_rate": 5.552691511186065e-08, |
|
"loss": 0.453, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 5.608169440242057, |
|
"grad_norm": 0.8350204825401306, |
|
"learning_rate": 5.4185742074321645e-08, |
|
"loss": 0.4814, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 5.612708018154311, |
|
"grad_norm": 0.7439131736755371, |
|
"learning_rate": 5.2860787261605485e-08, |
|
"loss": 0.479, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 5.617246596066566, |
|
"grad_norm": 0.8671088814735413, |
|
"learning_rate": 5.1552059459503144e-08, |
|
"loss": 0.4823, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 5.62178517397882, |
|
"grad_norm": 0.7441071271896362, |
|
"learning_rate": 5.0259567346205016e-08, |
|
"loss": 0.4579, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 5.626323751891074, |
|
"grad_norm": 1.0275096893310547, |
|
"learning_rate": 4.898331949224289e-08, |
|
"loss": 0.5064, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 5.630862329803328, |
|
"grad_norm": 0.8689181208610535, |
|
"learning_rate": 4.7723324360431644e-08, |
|
"loss": 0.462, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 5.635400907715582, |
|
"grad_norm": 0.7431532144546509, |
|
"learning_rate": 4.647959030581517e-08, |
|
"loss": 0.4923, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 5.639939485627837, |
|
"grad_norm": 0.9355483651161194, |
|
"learning_rate": 4.525212557560887e-08, |
|
"loss": 0.459, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 5.644478063540091, |
|
"grad_norm": 0.7806388735771179, |
|
"learning_rate": 4.404093830914752e-08, |
|
"loss": 0.4567, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 5.649016641452345, |
|
"grad_norm": 0.7851375937461853, |
|
"learning_rate": 4.284603653782832e-08, |
|
"loss": 0.4641, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 5.653555219364599, |
|
"grad_norm": 0.826127290725708, |
|
"learning_rate": 4.1667428185060145e-08, |
|
"loss": 0.4824, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 5.6580937972768535, |
|
"grad_norm": 0.7516127824783325, |
|
"learning_rate": 4.050512106620913e-08, |
|
"loss": 0.4798, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 5.662632375189108, |
|
"grad_norm": 0.7157534956932068, |
|
"learning_rate": 3.9359122888548984e-08, |
|
"loss": 0.4948, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 5.667170953101362, |
|
"grad_norm": 0.9715742468833923, |
|
"learning_rate": 3.822944125120742e-08, |
|
"loss": 0.4436, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 5.671709531013616, |
|
"grad_norm": 0.8162844777107239, |
|
"learning_rate": 3.711608364511787e-08, |
|
"loss": 0.506, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 5.67624810892587, |
|
"grad_norm": 0.9915447235107422, |
|
"learning_rate": 3.601905745296813e-08, |
|
"loss": 0.5134, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 5.680786686838124, |
|
"grad_norm": 0.8082160353660583, |
|
"learning_rate": 3.4938369949152616e-08, |
|
"loss": 0.4826, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 5.685325264750379, |
|
"grad_norm": 2.119804859161377, |
|
"learning_rate": 3.3874028299723805e-08, |
|
"loss": 0.5038, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 5.689863842662632, |
|
"grad_norm": 0.7752223610877991, |
|
"learning_rate": 3.2826039562343924e-08, |
|
"loss": 0.4629, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 5.694402420574886, |
|
"grad_norm": 0.7426053285598755, |
|
"learning_rate": 3.1794410686240006e-08, |
|
"loss": 0.4749, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 5.69894099848714, |
|
"grad_norm": 0.8134835362434387, |
|
"learning_rate": 3.077914851215585e-08, |
|
"loss": 0.4465, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 5.703479576399395, |
|
"grad_norm": 0.7048810720443726, |
|
"learning_rate": 2.9780259772307362e-08, |
|
"loss": 0.4684, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 5.708018154311649, |
|
"grad_norm": 0.6710391640663147, |
|
"learning_rate": 2.879775109033811e-08, |
|
"loss": 0.472, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 5.712556732223903, |
|
"grad_norm": 0.8969804048538208, |
|
"learning_rate": 2.7831628981275504e-08, |
|
"loss": 0.4861, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 5.717095310136157, |
|
"grad_norm": 1.4039125442504883, |
|
"learning_rate": 2.6881899851487758e-08, |
|
"loss": 0.4634, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 5.721633888048411, |
|
"grad_norm": 0.6740471124649048, |
|
"learning_rate": 2.5948569998640037e-08, |
|
"loss": 0.49, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 5.726172465960666, |
|
"grad_norm": 0.671715497970581, |
|
"learning_rate": 2.5031645611654497e-08, |
|
"loss": 0.4782, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 5.73071104387292, |
|
"grad_norm": 0.7497738003730774, |
|
"learning_rate": 2.413113277066781e-08, |
|
"loss": 0.4396, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 5.735249621785174, |
|
"grad_norm": 0.739037036895752, |
|
"learning_rate": 2.3247037446992304e-08, |
|
"loss": 0.4561, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 5.739788199697428, |
|
"grad_norm": 0.7688632607460022, |
|
"learning_rate": 2.2379365503074902e-08, |
|
"loss": 0.4919, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 5.7443267776096825, |
|
"grad_norm": 1.2213677167892456, |
|
"learning_rate": 2.1528122692459074e-08, |
|
"loss": 0.4927, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 5.748865355521937, |
|
"grad_norm": 0.7246401309967041, |
|
"learning_rate": 2.0693314659746276e-08, |
|
"loss": 0.4709, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 5.753403933434191, |
|
"grad_norm": 0.7174831628799438, |
|
"learning_rate": 1.9874946940559305e-08, |
|
"loss": 0.4728, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 5.757942511346445, |
|
"grad_norm": 0.74196857213974, |
|
"learning_rate": 1.907302496150454e-08, |
|
"loss": 0.4766, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 5.762481089258699, |
|
"grad_norm": 0.8071885108947754, |
|
"learning_rate": 1.8287554040136714e-08, |
|
"loss": 0.4954, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 5.7670196671709535, |
|
"grad_norm": 0.9311023950576782, |
|
"learning_rate": 1.751853938492337e-08, |
|
"loss": 0.4315, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 5.771558245083208, |
|
"grad_norm": 0.8586441278457642, |
|
"learning_rate": 1.6765986095209906e-08, |
|
"loss": 0.5136, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 5.776096822995461, |
|
"grad_norm": 0.7332836985588074, |
|
"learning_rate": 1.602989916118708e-08, |
|
"loss": 0.5048, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 5.780635400907715, |
|
"grad_norm": 1.2354652881622314, |
|
"learning_rate": 1.531028346385577e-08, |
|
"loss": 0.4652, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 5.785173978819969, |
|
"grad_norm": 0.6708296537399292, |
|
"learning_rate": 1.4607143774997557e-08, |
|
"loss": 0.4953, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 5.789712556732224, |
|
"grad_norm": 0.8077536821365356, |
|
"learning_rate": 1.3920484757139751e-08, |
|
"loss": 0.5044, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 5.794251134644478, |
|
"grad_norm": 0.9781695604324341, |
|
"learning_rate": 1.3250310963527358e-08, |
|
"loss": 0.5122, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 5.798789712556732, |
|
"grad_norm": 0.7283720374107361, |
|
"learning_rate": 1.2596626838090875e-08, |
|
"loss": 0.4435, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 5.803328290468986, |
|
"grad_norm": 0.7447962760925293, |
|
"learning_rate": 1.1959436715417993e-08, |
|
"loss": 0.4749, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 5.80786686838124, |
|
"grad_norm": 0.7438759207725525, |
|
"learning_rate": 1.1338744820724168e-08, |
|
"loss": 0.4774, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 5.812405446293495, |
|
"grad_norm": 0.7419090270996094, |
|
"learning_rate": 1.0734555269825141e-08, |
|
"loss": 0.4876, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 5.816944024205749, |
|
"grad_norm": 0.8018777966499329, |
|
"learning_rate": 1.0146872069109748e-08, |
|
"loss": 0.48, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 5.821482602118003, |
|
"grad_norm": 0.7206845879554749, |
|
"learning_rate": 9.575699115512149e-09, |
|
"loss": 0.4402, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 5.826021180030257, |
|
"grad_norm": 0.8025342226028442, |
|
"learning_rate": 9.021040196487419e-09, |
|
"loss": 0.5233, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 5.8305597579425115, |
|
"grad_norm": 0.7229884266853333, |
|
"learning_rate": 8.482898989985721e-09, |
|
"loss": 0.5087, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 5.835098335854766, |
|
"grad_norm": 0.809455931186676, |
|
"learning_rate": 7.961279064428452e-09, |
|
"loss": 0.4836, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 5.83963691376702, |
|
"grad_norm": 0.8334141969680786, |
|
"learning_rate": 7.456183878683243e-09, |
|
"loss": 0.4724, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 5.844175491679274, |
|
"grad_norm": 2.202322244644165, |
|
"learning_rate": 6.967616782043163e-09, |
|
"loss": 0.48, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 5.848714069591528, |
|
"grad_norm": 0.7208507061004639, |
|
"learning_rate": 6.495581014202557e-09, |
|
"loss": 0.4551, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 5.8532526475037825, |
|
"grad_norm": 0.8301936984062195, |
|
"learning_rate": 6.040079705235957e-09, |
|
"loss": 0.4666, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 5.857791225416037, |
|
"grad_norm": 0.7231348752975464, |
|
"learning_rate": 5.601115875578933e-09, |
|
"loss": 0.4478, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 5.86232980332829, |
|
"grad_norm": 0.740260124206543, |
|
"learning_rate": 5.178692436005883e-09, |
|
"loss": 0.4785, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 5.866868381240545, |
|
"grad_norm": 0.7141087651252747, |
|
"learning_rate": 4.772812187611719e-09, |
|
"loss": 0.5093, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 5.871406959152798, |
|
"grad_norm": 0.6469274759292603, |
|
"learning_rate": 4.383477821793547e-09, |
|
"loss": 0.511, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 5.875945537065053, |
|
"grad_norm": 0.7152246832847595, |
|
"learning_rate": 4.010691920232623e-09, |
|
"loss": 0.5036, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 5.880484114977307, |
|
"grad_norm": 0.8838086128234863, |
|
"learning_rate": 3.6544569548763175e-09, |
|
"loss": 0.4956, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 5.885022692889561, |
|
"grad_norm": 0.7219541072845459, |
|
"learning_rate": 3.3147752879236773e-09, |
|
"loss": 0.5097, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 5.889561270801815, |
|
"grad_norm": 1.4032868146896362, |
|
"learning_rate": 2.991649171807942e-09, |
|
"loss": 0.4439, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 5.8940998487140694, |
|
"grad_norm": 1.0518102645874023, |
|
"learning_rate": 2.6850807491823893e-09, |
|
"loss": 0.4909, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 5.898638426626324, |
|
"grad_norm": 0.7985177636146545, |
|
"learning_rate": 2.3950720529059003e-09, |
|
"loss": 0.4639, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 5.903177004538578, |
|
"grad_norm": 0.8552533984184265, |
|
"learning_rate": 2.1216250060296374e-09, |
|
"loss": 0.4552, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 5.907715582450832, |
|
"grad_norm": 0.711228609085083, |
|
"learning_rate": 1.8647414217848325e-09, |
|
"loss": 0.4411, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 5.912254160363086, |
|
"grad_norm": 0.7168917655944824, |
|
"learning_rate": 1.6244230035694642e-09, |
|
"loss": 0.4903, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 5.9167927382753405, |
|
"grad_norm": 0.7238544225692749, |
|
"learning_rate": 1.400671344937987e-09, |
|
"loss": 0.4901, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 5.921331316187595, |
|
"grad_norm": 1.0520352125167847, |
|
"learning_rate": 1.1934879295905089e-09, |
|
"loss": 0.4943, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 5.925869894099849, |
|
"grad_norm": 0.7716150283813477, |
|
"learning_rate": 1.0028741313627965e-09, |
|
"loss": 0.4732, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 5.930408472012103, |
|
"grad_norm": 0.8260288238525391, |
|
"learning_rate": 8.28831214217396e-10, |
|
"loss": 0.4676, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 5.934947049924357, |
|
"grad_norm": 0.69765704870224, |
|
"learning_rate": 6.713603322347495e-10, |
|
"loss": 0.489, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 5.9394856278366115, |
|
"grad_norm": 1.1246147155761719, |
|
"learning_rate": 5.304625296065346e-10, |
|
"loss": 0.4862, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 5.944024205748866, |
|
"grad_norm": 0.7668100595474243, |
|
"learning_rate": 4.0613874062761515e-10, |
|
"loss": 0.4825, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 5.94856278366112, |
|
"grad_norm": 0.8186943531036377, |
|
"learning_rate": 2.983897896910448e-10, |
|
"loss": 0.4983, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 5.953101361573374, |
|
"grad_norm": 0.7153812050819397, |
|
"learning_rate": 2.0721639128085113e-10, |
|
"loss": 0.4683, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 5.957639939485627, |
|
"grad_norm": 0.9169692397117615, |
|
"learning_rate": 1.3261914996953728e-10, |
|
"loss": 0.4744, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 5.962178517397882, |
|
"grad_norm": 0.7419823408126831, |
|
"learning_rate": 7.459856041169833e-11, |
|
"loss": 0.5012, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 5.966717095310136, |
|
"grad_norm": 0.8549036383628845, |
|
"learning_rate": 3.315500734318855e-11, |
|
"loss": 0.4881, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 5.97125567322239, |
|
"grad_norm": 0.6958428621292114, |
|
"learning_rate": 8.288765576403013e-12, |
|
"loss": 0.4775, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 5.975794251134644, |
|
"grad_norm": 0.7530087828636169, |
|
"learning_rate": 0.0, |
|
"loss": 0.4666, |
|
"step": 1320 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1320, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 6, |
|
"save_steps": 220, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.99618736616217e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |