|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.9848714069591527, |
|
"eval_steps": 500, |
|
"global_step": 880, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0045385779122541605, |
|
"grad_norm": 12.459190368652344, |
|
"learning_rate": 5.0000000000000004e-08, |
|
"loss": 4.6369, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.009077155824508321, |
|
"grad_norm": 12.756917953491211, |
|
"learning_rate": 1.0000000000000001e-07, |
|
"loss": 4.6738, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01361573373676248, |
|
"grad_norm": 15.50593090057373, |
|
"learning_rate": 1.5000000000000002e-07, |
|
"loss": 4.6681, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.018154311649016642, |
|
"grad_norm": 12.976861000061035, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 4.246, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0226928895612708, |
|
"grad_norm": 14.594775199890137, |
|
"learning_rate": 2.5000000000000004e-07, |
|
"loss": 4.4452, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02723146747352496, |
|
"grad_norm": 16.087888717651367, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 4.5528, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.03177004538577912, |
|
"grad_norm": 13.62125301361084, |
|
"learning_rate": 3.5000000000000004e-07, |
|
"loss": 4.5628, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.036308623298033284, |
|
"grad_norm": 17.010616302490234, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 4.5768, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.04084720121028744, |
|
"grad_norm": 14.567586898803711, |
|
"learning_rate": 4.5000000000000003e-07, |
|
"loss": 4.3937, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0453857791225416, |
|
"grad_norm": 15.238479614257812, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 4.495, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.049924357034795766, |
|
"grad_norm": 13.89387321472168, |
|
"learning_rate": 5.5e-07, |
|
"loss": 4.4705, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.05446293494704992, |
|
"grad_norm": 13.949710845947266, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 4.328, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.059001512859304085, |
|
"grad_norm": 13.15122127532959, |
|
"learning_rate": 6.5e-07, |
|
"loss": 4.4296, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.06354009077155824, |
|
"grad_norm": 14.076905250549316, |
|
"learning_rate": 7.000000000000001e-07, |
|
"loss": 4.5285, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0680786686838124, |
|
"grad_norm": 14.299891471862793, |
|
"learning_rate": 7.5e-07, |
|
"loss": 4.5849, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.07261724659606657, |
|
"grad_norm": 13.573644638061523, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 4.3638, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.07715582450832073, |
|
"grad_norm": 14.140484809875488, |
|
"learning_rate": 8.500000000000001e-07, |
|
"loss": 4.4449, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.08169440242057488, |
|
"grad_norm": 11.941351890563965, |
|
"learning_rate": 9.000000000000001e-07, |
|
"loss": 4.3646, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.08623298033282904, |
|
"grad_norm": 10.630327224731445, |
|
"learning_rate": 9.500000000000001e-07, |
|
"loss": 4.2902, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0907715582450832, |
|
"grad_norm": 11.662637710571289, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 4.1961, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.09531013615733737, |
|
"grad_norm": 12.008113861083984, |
|
"learning_rate": 1.0500000000000001e-06, |
|
"loss": 4.1941, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.09984871406959153, |
|
"grad_norm": 12.686023712158203, |
|
"learning_rate": 1.1e-06, |
|
"loss": 4.1346, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.1043872919818457, |
|
"grad_norm": 12.529243469238281, |
|
"learning_rate": 1.1500000000000002e-06, |
|
"loss": 4.1137, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.10892586989409984, |
|
"grad_norm": 11.19096565246582, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 4.0772, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.11346444780635401, |
|
"grad_norm": 11.182024955749512, |
|
"learning_rate": 1.25e-06, |
|
"loss": 3.8888, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.11800302571860817, |
|
"grad_norm": 9.899381637573242, |
|
"learning_rate": 1.3e-06, |
|
"loss": 4.0117, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.12254160363086233, |
|
"grad_norm": 9.448798179626465, |
|
"learning_rate": 1.3500000000000002e-06, |
|
"loss": 3.8384, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.12708018154311648, |
|
"grad_norm": 10.632583618164062, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 3.7857, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.13161875945537066, |
|
"grad_norm": 10.56238079071045, |
|
"learning_rate": 1.45e-06, |
|
"loss": 3.4783, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.1361573373676248, |
|
"grad_norm": 12.410117149353027, |
|
"learning_rate": 1.5e-06, |
|
"loss": 3.4828, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.14069591527987896, |
|
"grad_norm": 10.183599472045898, |
|
"learning_rate": 1.5500000000000002e-06, |
|
"loss": 3.6337, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.14523449319213314, |
|
"grad_norm": 9.421585083007812, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 3.4453, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.14977307110438728, |
|
"grad_norm": 9.230025291442871, |
|
"learning_rate": 1.6500000000000003e-06, |
|
"loss": 3.3481, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.15431164901664146, |
|
"grad_norm": 8.295567512512207, |
|
"learning_rate": 1.7000000000000002e-06, |
|
"loss": 3.3145, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.1588502269288956, |
|
"grad_norm": 9.138203620910645, |
|
"learning_rate": 1.75e-06, |
|
"loss": 3.1872, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.16338880484114976, |
|
"grad_norm": 11.864872932434082, |
|
"learning_rate": 1.8000000000000001e-06, |
|
"loss": 2.9837, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.16792738275340394, |
|
"grad_norm": 12.373150825500488, |
|
"learning_rate": 1.85e-06, |
|
"loss": 3.0049, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.17246596066565809, |
|
"grad_norm": 21.665483474731445, |
|
"learning_rate": 1.9000000000000002e-06, |
|
"loss": 2.7664, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.17700453857791226, |
|
"grad_norm": 22.663740158081055, |
|
"learning_rate": 1.9500000000000004e-06, |
|
"loss": 2.7272, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1815431164901664, |
|
"grad_norm": 26.674400329589844, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 2.8077, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.18608169440242056, |
|
"grad_norm": 28.503612518310547, |
|
"learning_rate": 2.05e-06, |
|
"loss": 2.7306, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.19062027231467474, |
|
"grad_norm": 29.03409767150879, |
|
"learning_rate": 2.1000000000000002e-06, |
|
"loss": 2.7405, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.1951588502269289, |
|
"grad_norm": 17.844894409179688, |
|
"learning_rate": 2.15e-06, |
|
"loss": 2.6464, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.19969742813918306, |
|
"grad_norm": 19.220829010009766, |
|
"learning_rate": 2.2e-06, |
|
"loss": 2.4554, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.2042360060514372, |
|
"grad_norm": 25.161415100097656, |
|
"learning_rate": 2.25e-06, |
|
"loss": 2.2773, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.2087745839636914, |
|
"grad_norm": 17.46828269958496, |
|
"learning_rate": 2.3000000000000004e-06, |
|
"loss": 2.3392, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.21331316187594554, |
|
"grad_norm": 13.194672584533691, |
|
"learning_rate": 2.35e-06, |
|
"loss": 2.0409, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.2178517397881997, |
|
"grad_norm": 7.290500164031982, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 2.1879, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.22239031770045387, |
|
"grad_norm": 7.56943941116333, |
|
"learning_rate": 2.4500000000000003e-06, |
|
"loss": 2.1656, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.22692889561270801, |
|
"grad_norm": 8.335527420043945, |
|
"learning_rate": 2.5e-06, |
|
"loss": 2.1294, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.2314674735249622, |
|
"grad_norm": 10.013853073120117, |
|
"learning_rate": 2.55e-06, |
|
"loss": 2.1816, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.23600605143721634, |
|
"grad_norm": 8.674482345581055, |
|
"learning_rate": 2.6e-06, |
|
"loss": 2.0078, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.2405446293494705, |
|
"grad_norm": 6.09174919128418, |
|
"learning_rate": 2.6500000000000005e-06, |
|
"loss": 2.1328, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.24508320726172467, |
|
"grad_norm": 7.3330488204956055, |
|
"learning_rate": 2.7000000000000004e-06, |
|
"loss": 1.8961, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.24962178517397882, |
|
"grad_norm": 8.764911651611328, |
|
"learning_rate": 2.7500000000000004e-06, |
|
"loss": 1.7026, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.25416036308623297, |
|
"grad_norm": 12.413115501403809, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.8172, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.2586989409984871, |
|
"grad_norm": 9.009276390075684, |
|
"learning_rate": 2.85e-06, |
|
"loss": 1.6527, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.2632375189107413, |
|
"grad_norm": 9.965579986572266, |
|
"learning_rate": 2.9e-06, |
|
"loss": 1.6348, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.26777609682299547, |
|
"grad_norm": 7.091963768005371, |
|
"learning_rate": 2.95e-06, |
|
"loss": 1.7016, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.2723146747352496, |
|
"grad_norm": 5.276648998260498, |
|
"learning_rate": 3e-06, |
|
"loss": 1.7088, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.27685325264750377, |
|
"grad_norm": 9.634596824645996, |
|
"learning_rate": 3.05e-06, |
|
"loss": 1.4961, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.2813918305597579, |
|
"grad_norm": 6.663750648498535, |
|
"learning_rate": 3.1000000000000004e-06, |
|
"loss": 1.522, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.2859304084720121, |
|
"grad_norm": 36.79426956176758, |
|
"learning_rate": 3.1500000000000003e-06, |
|
"loss": 1.4855, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.29046898638426627, |
|
"grad_norm": 31.88567352294922, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.5058, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.2950075642965204, |
|
"grad_norm": 12.319960594177246, |
|
"learning_rate": 3.2500000000000002e-06, |
|
"loss": 1.4547, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.29954614220877457, |
|
"grad_norm": 4.246046543121338, |
|
"learning_rate": 3.3000000000000006e-06, |
|
"loss": 1.3153, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.3040847201210287, |
|
"grad_norm": 5.3949503898620605, |
|
"learning_rate": 3.3500000000000005e-06, |
|
"loss": 1.4237, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.3086232980332829, |
|
"grad_norm": 9.975737571716309, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 1.327, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.31316187594553707, |
|
"grad_norm": 6.356144428253174, |
|
"learning_rate": 3.45e-06, |
|
"loss": 1.2685, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.3177004538577912, |
|
"grad_norm": 3.9029836654663086, |
|
"learning_rate": 3.5e-06, |
|
"loss": 1.3856, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.32223903177004537, |
|
"grad_norm": 4.215930938720703, |
|
"learning_rate": 3.5500000000000003e-06, |
|
"loss": 1.1844, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.3267776096822995, |
|
"grad_norm": 27.889633178710938, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 1.2535, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.3313161875945537, |
|
"grad_norm": 23.77273178100586, |
|
"learning_rate": 3.65e-06, |
|
"loss": 1.3189, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.3358547655068079, |
|
"grad_norm": 9.203927040100098, |
|
"learning_rate": 3.7e-06, |
|
"loss": 1.1993, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.340393343419062, |
|
"grad_norm": 3.1091806888580322, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 1.1804, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.34493192133131617, |
|
"grad_norm": 9.0440034866333, |
|
"learning_rate": 3.8000000000000005e-06, |
|
"loss": 1.2318, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.3494704992435703, |
|
"grad_norm": 7.17051362991333, |
|
"learning_rate": 3.85e-06, |
|
"loss": 1.2018, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.3540090771558245, |
|
"grad_norm": 8.164457321166992, |
|
"learning_rate": 3.900000000000001e-06, |
|
"loss": 1.0911, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.3585476550680787, |
|
"grad_norm": 3.3671042919158936, |
|
"learning_rate": 3.95e-06, |
|
"loss": 1.1747, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.3630862329803328, |
|
"grad_norm": 4.223191261291504, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.1141, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.367624810892587, |
|
"grad_norm": 10.657241821289062, |
|
"learning_rate": 4.05e-06, |
|
"loss": 1.0506, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.3721633888048411, |
|
"grad_norm": 2.664783239364624, |
|
"learning_rate": 4.1e-06, |
|
"loss": 1.1271, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.3767019667170953, |
|
"grad_norm": 5.334985733032227, |
|
"learning_rate": 4.15e-06, |
|
"loss": 1.0812, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.3812405446293495, |
|
"grad_norm": 7.471070289611816, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 1.1027, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.3857791225416036, |
|
"grad_norm": 3.270421028137207, |
|
"learning_rate": 4.25e-06, |
|
"loss": 1.054, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.3903177004538578, |
|
"grad_norm": 1.9382085800170898, |
|
"learning_rate": 4.3e-06, |
|
"loss": 0.975, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.394856278366112, |
|
"grad_norm": 24.2485408782959, |
|
"learning_rate": 4.350000000000001e-06, |
|
"loss": 1.0288, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.39939485627836613, |
|
"grad_norm": 42.359432220458984, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.111, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.4039334341906203, |
|
"grad_norm": 25.72220230102539, |
|
"learning_rate": 4.450000000000001e-06, |
|
"loss": 1.0472, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.4084720121028744, |
|
"grad_norm": 3.1196579933166504, |
|
"learning_rate": 4.5e-06, |
|
"loss": 1.0055, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.4130105900151286, |
|
"grad_norm": 4.584676742553711, |
|
"learning_rate": 4.5500000000000005e-06, |
|
"loss": 0.9848, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.4175491679273828, |
|
"grad_norm": 6.140016078948975, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 0.948, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.42208774583963693, |
|
"grad_norm": 5.266363620758057, |
|
"learning_rate": 4.65e-06, |
|
"loss": 0.9261, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.4266263237518911, |
|
"grad_norm": 3.6768720149993896, |
|
"learning_rate": 4.7e-06, |
|
"loss": 0.9772, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.43116490166414523, |
|
"grad_norm": 8.903961181640625, |
|
"learning_rate": 4.75e-06, |
|
"loss": 0.9514, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.4357034795763994, |
|
"grad_norm": 15.684305191040039, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 0.9629, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.4402420574886536, |
|
"grad_norm": 21.329519271850586, |
|
"learning_rate": 4.85e-06, |
|
"loss": 1.0118, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.44478063540090773, |
|
"grad_norm": 7.824005603790283, |
|
"learning_rate": 4.9000000000000005e-06, |
|
"loss": 0.9094, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.4493192133131619, |
|
"grad_norm": 2.283174514770508, |
|
"learning_rate": 4.95e-06, |
|
"loss": 0.8877, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.45385779122541603, |
|
"grad_norm": 5.028634548187256, |
|
"learning_rate": 5e-06, |
|
"loss": 0.8946, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.4583963691376702, |
|
"grad_norm": 6.072951793670654, |
|
"learning_rate": 4.9999917112344245e-06, |
|
"loss": 0.9248, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.4629349470499244, |
|
"grad_norm": 7.111969470977783, |
|
"learning_rate": 4.999966844992657e-06, |
|
"loss": 0.9172, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.46747352496217853, |
|
"grad_norm": 3.792372465133667, |
|
"learning_rate": 4.999925401439588e-06, |
|
"loss": 0.8718, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.4720121028744327, |
|
"grad_norm": 4.017702579498291, |
|
"learning_rate": 4.999867380850031e-06, |
|
"loss": 0.9121, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.47655068078668683, |
|
"grad_norm": 6.903019905090332, |
|
"learning_rate": 4.99979278360872e-06, |
|
"loss": 0.9359, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.481089258698941, |
|
"grad_norm": 3.740607500076294, |
|
"learning_rate": 4.999701610210309e-06, |
|
"loss": 0.8474, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.4856278366111952, |
|
"grad_norm": 3.379190683364868, |
|
"learning_rate": 4.999593861259373e-06, |
|
"loss": 0.8641, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.49016641452344933, |
|
"grad_norm": 3.0796492099761963, |
|
"learning_rate": 4.999469537470394e-06, |
|
"loss": 0.8255, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.4947049924357035, |
|
"grad_norm": 3.3708953857421875, |
|
"learning_rate": 4.999328639667765e-06, |
|
"loss": 0.8823, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.49924357034795763, |
|
"grad_norm": 3.2106549739837646, |
|
"learning_rate": 4.999171168785783e-06, |
|
"loss": 0.8584, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.5037821482602118, |
|
"grad_norm": 6.520877838134766, |
|
"learning_rate": 4.998997125868638e-06, |
|
"loss": 0.8718, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.5083207261724659, |
|
"grad_norm": 1.0179104804992676, |
|
"learning_rate": 4.99880651207041e-06, |
|
"loss": 0.871, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.5128593040847201, |
|
"grad_norm": 1.1883283853530884, |
|
"learning_rate": 4.998599328655063e-06, |
|
"loss": 0.8689, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.5173978819969742, |
|
"grad_norm": 1.2722241878509521, |
|
"learning_rate": 4.998375576996431e-06, |
|
"loss": 0.8594, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.5219364599092284, |
|
"grad_norm": 1.8931000232696533, |
|
"learning_rate": 4.9981352585782154e-06, |
|
"loss": 0.8327, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.5264750378214826, |
|
"grad_norm": 3.093480348587036, |
|
"learning_rate": 4.997878374993971e-06, |
|
"loss": 0.8372, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.5310136157337367, |
|
"grad_norm": 5.2650957107543945, |
|
"learning_rate": 4.9976049279470955e-06, |
|
"loss": 0.8386, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.5355521936459909, |
|
"grad_norm": 4.467101573944092, |
|
"learning_rate": 4.997314919250818e-06, |
|
"loss": 0.7788, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.540090771558245, |
|
"grad_norm": 3.614868640899658, |
|
"learning_rate": 4.997008350828192e-06, |
|
"loss": 0.874, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.5446293494704992, |
|
"grad_norm": 12.209319114685059, |
|
"learning_rate": 4.996685224712077e-06, |
|
"loss": 0.8223, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.5491679273827534, |
|
"grad_norm": 4.1903252601623535, |
|
"learning_rate": 4.9963455430451245e-06, |
|
"loss": 0.8455, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.5537065052950075, |
|
"grad_norm": 2.649549722671509, |
|
"learning_rate": 4.9959893080797675e-06, |
|
"loss": 0.7875, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.5582450832072617, |
|
"grad_norm": 4.935393810272217, |
|
"learning_rate": 4.995616522178207e-06, |
|
"loss": 0.8691, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.5627836611195158, |
|
"grad_norm": 5.565356731414795, |
|
"learning_rate": 4.995227187812389e-06, |
|
"loss": 0.8267, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.56732223903177, |
|
"grad_norm": 5.23801851272583, |
|
"learning_rate": 4.994821307563995e-06, |
|
"loss": 0.826, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.5718608169440242, |
|
"grad_norm": 4.2979736328125, |
|
"learning_rate": 4.994398884124422e-06, |
|
"loss": 0.8068, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.5763993948562783, |
|
"grad_norm": 6.285053730010986, |
|
"learning_rate": 4.993959920294764e-06, |
|
"loss": 0.819, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.5809379727685325, |
|
"grad_norm": 10.453060150146484, |
|
"learning_rate": 4.9935044189857975e-06, |
|
"loss": 0.773, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.5854765506807866, |
|
"grad_norm": 31.77552604675293, |
|
"learning_rate": 4.993032383217957e-06, |
|
"loss": 0.8738, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.5900151285930408, |
|
"grad_norm": 1.1456351280212402, |
|
"learning_rate": 4.992543816121317e-06, |
|
"loss": 0.7518, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.594553706505295, |
|
"grad_norm": 1.3343666791915894, |
|
"learning_rate": 4.992038720935572e-06, |
|
"loss": 0.8108, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.5990922844175491, |
|
"grad_norm": 0.8912076354026794, |
|
"learning_rate": 4.991517101010015e-06, |
|
"loss": 0.8159, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.6036308623298033, |
|
"grad_norm": 2.249366044998169, |
|
"learning_rate": 4.990978959803513e-06, |
|
"loss": 0.8124, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.6081694402420574, |
|
"grad_norm": 1.2822734117507935, |
|
"learning_rate": 4.990424300884488e-06, |
|
"loss": 0.8213, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.6127080181543116, |
|
"grad_norm": 6.053490161895752, |
|
"learning_rate": 4.98985312793089e-06, |
|
"loss": 0.7829, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.6172465960665658, |
|
"grad_norm": 2.2606236934661865, |
|
"learning_rate": 4.989265444730176e-06, |
|
"loss": 0.7729, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.6217851739788199, |
|
"grad_norm": 3.8894989490509033, |
|
"learning_rate": 4.988661255179276e-06, |
|
"loss": 0.774, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.6263237518910741, |
|
"grad_norm": 5.648194313049316, |
|
"learning_rate": 4.988040563284582e-06, |
|
"loss": 0.7251, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.6308623298033282, |
|
"grad_norm": 1.7583339214324951, |
|
"learning_rate": 4.98740337316191e-06, |
|
"loss": 0.7875, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.6354009077155824, |
|
"grad_norm": 1.3959944248199463, |
|
"learning_rate": 4.9867496890364734e-06, |
|
"loss": 0.7726, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.6399394856278366, |
|
"grad_norm": 1.7879443168640137, |
|
"learning_rate": 4.986079515242861e-06, |
|
"loss": 0.786, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.6444780635400907, |
|
"grad_norm": 2.0919816493988037, |
|
"learning_rate": 4.985392856225003e-06, |
|
"loss": 0.7802, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.649016641452345, |
|
"grad_norm": 1.272477626800537, |
|
"learning_rate": 4.984689716536145e-06, |
|
"loss": 0.7842, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.653555219364599, |
|
"grad_norm": 1.1265331506729126, |
|
"learning_rate": 4.983970100838814e-06, |
|
"loss": 0.736, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.6580937972768532, |
|
"grad_norm": 0.8514362573623657, |
|
"learning_rate": 4.983234013904791e-06, |
|
"loss": 0.749, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.6626323751891074, |
|
"grad_norm": 3.148453950881958, |
|
"learning_rate": 4.9824814606150774e-06, |
|
"loss": 0.7884, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.6671709531013615, |
|
"grad_norm": 1.531554937362671, |
|
"learning_rate": 4.981712445959864e-06, |
|
"loss": 0.767, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.6717095310136157, |
|
"grad_norm": 1.7979401350021362, |
|
"learning_rate": 4.980926975038496e-06, |
|
"loss": 0.7575, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.6762481089258698, |
|
"grad_norm": 1.1131621599197388, |
|
"learning_rate": 4.9801250530594415e-06, |
|
"loss": 0.76, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.680786686838124, |
|
"grad_norm": 1.2112400531768799, |
|
"learning_rate": 4.9793066853402535e-06, |
|
"loss": 0.769, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.6853252647503782, |
|
"grad_norm": 1.276172161102295, |
|
"learning_rate": 4.978471877307541e-06, |
|
"loss": 0.7641, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.6898638426626323, |
|
"grad_norm": 7.614717960357666, |
|
"learning_rate": 4.977620634496926e-06, |
|
"loss": 0.7614, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.6944024205748865, |
|
"grad_norm": 0.9541272521018982, |
|
"learning_rate": 4.976752962553008e-06, |
|
"loss": 0.7406, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.6989409984871406, |
|
"grad_norm": 0.9793027639389038, |
|
"learning_rate": 4.975868867229332e-06, |
|
"loss": 0.7538, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.7034795763993948, |
|
"grad_norm": 8.540267944335938, |
|
"learning_rate": 4.974968354388346e-06, |
|
"loss": 0.7616, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.708018154311649, |
|
"grad_norm": 2.279240608215332, |
|
"learning_rate": 4.97405143000136e-06, |
|
"loss": 0.7157, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.7125567322239031, |
|
"grad_norm": 2.947227716445923, |
|
"learning_rate": 4.973118100148513e-06, |
|
"loss": 0.7348, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.7170953101361573, |
|
"grad_norm": 2.211785316467285, |
|
"learning_rate": 4.9721683710187255e-06, |
|
"loss": 0.7144, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.7216338880484114, |
|
"grad_norm": 1.3755372762680054, |
|
"learning_rate": 4.971202248909662e-06, |
|
"loss": 0.6857, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.7261724659606656, |
|
"grad_norm": 2.564708709716797, |
|
"learning_rate": 4.970219740227693e-06, |
|
"loss": 0.7124, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.7307110438729199, |
|
"grad_norm": 11.76566219329834, |
|
"learning_rate": 4.9692208514878445e-06, |
|
"loss": 0.7815, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.735249621785174, |
|
"grad_norm": 3.7665200233459473, |
|
"learning_rate": 4.9682055893137605e-06, |
|
"loss": 0.7021, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.7397881996974282, |
|
"grad_norm": 2.9076344966888428, |
|
"learning_rate": 4.967173960437657e-06, |
|
"loss": 0.7083, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.7443267776096822, |
|
"grad_norm": 2.336026191711426, |
|
"learning_rate": 4.966125971700277e-06, |
|
"loss": 0.7455, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.7488653555219364, |
|
"grad_norm": 6.48813009262085, |
|
"learning_rate": 4.965061630050848e-06, |
|
"loss": 0.7628, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.7534039334341907, |
|
"grad_norm": 1.9123872518539429, |
|
"learning_rate": 4.9639809425470324e-06, |
|
"loss": 0.7212, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.7579425113464447, |
|
"grad_norm": 6.638424873352051, |
|
"learning_rate": 4.962883916354882e-06, |
|
"loss": 0.7523, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.762481089258699, |
|
"grad_norm": 1.3606716394424438, |
|
"learning_rate": 4.961770558748793e-06, |
|
"loss": 0.7651, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.7670196671709532, |
|
"grad_norm": 1.2726523876190186, |
|
"learning_rate": 4.960640877111451e-06, |
|
"loss": 0.7729, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.7715582450832073, |
|
"grad_norm": 0.9216910600662231, |
|
"learning_rate": 4.959494878933792e-06, |
|
"loss": 0.7162, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.7760968229954615, |
|
"grad_norm": 2.3702430725097656, |
|
"learning_rate": 4.958332571814941e-06, |
|
"loss": 0.7038, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.7806354009077155, |
|
"grad_norm": 4.6101555824279785, |
|
"learning_rate": 4.957153963462172e-06, |
|
"loss": 0.7141, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.7851739788199698, |
|
"grad_norm": 1.7314170598983765, |
|
"learning_rate": 4.955959061690853e-06, |
|
"loss": 0.7374, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.789712556732224, |
|
"grad_norm": 2.91019868850708, |
|
"learning_rate": 4.9547478744243914e-06, |
|
"loss": 0.7048, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.794251134644478, |
|
"grad_norm": 2.872775077819824, |
|
"learning_rate": 4.953520409694186e-06, |
|
"loss": 0.6912, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.7987897125567323, |
|
"grad_norm": 2.2696948051452637, |
|
"learning_rate": 4.952276675639569e-06, |
|
"loss": 0.7432, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.8033282904689864, |
|
"grad_norm": 1.8855810165405273, |
|
"learning_rate": 4.951016680507757e-06, |
|
"loss": 0.7056, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.8078668683812406, |
|
"grad_norm": 5.202772617340088, |
|
"learning_rate": 4.9497404326537954e-06, |
|
"loss": 0.7114, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.8124054462934948, |
|
"grad_norm": 0.7916449904441833, |
|
"learning_rate": 4.948447940540497e-06, |
|
"loss": 0.7202, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.8169440242057489, |
|
"grad_norm": 1.5009609460830688, |
|
"learning_rate": 4.947139212738395e-06, |
|
"loss": 0.7245, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.8214826021180031, |
|
"grad_norm": 1.858067512512207, |
|
"learning_rate": 4.945814257925679e-06, |
|
"loss": 0.6962, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.8260211800302572, |
|
"grad_norm": 0.7835391163825989, |
|
"learning_rate": 4.94447308488814e-06, |
|
"loss": 0.6875, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.8305597579425114, |
|
"grad_norm": 21.755929946899414, |
|
"learning_rate": 4.943115702519115e-06, |
|
"loss": 0.7304, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.8350983358547656, |
|
"grad_norm": 6.944667816162109, |
|
"learning_rate": 4.941742119819421e-06, |
|
"loss": 0.7381, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.8396369137670197, |
|
"grad_norm": 0.9813210368156433, |
|
"learning_rate": 4.940352345897304e-06, |
|
"loss": 0.682, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.8441754916792739, |
|
"grad_norm": 1.8010449409484863, |
|
"learning_rate": 4.938946389968372e-06, |
|
"loss": 0.7639, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.848714069591528, |
|
"grad_norm": 2.293980121612549, |
|
"learning_rate": 4.937524261355535e-06, |
|
"loss": 0.7027, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.8532526475037822, |
|
"grad_norm": 1.9937771558761597, |
|
"learning_rate": 4.9360859694889475e-06, |
|
"loss": 0.688, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.8577912254160364, |
|
"grad_norm": 1.8442484140396118, |
|
"learning_rate": 4.934631523905938e-06, |
|
"loss": 0.6261, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.8623298033282905, |
|
"grad_norm": 9.626107215881348, |
|
"learning_rate": 4.933160934250957e-06, |
|
"loss": 0.7605, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.8668683812405447, |
|
"grad_norm": 14.051777839660645, |
|
"learning_rate": 4.931674210275499e-06, |
|
"loss": 0.7183, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.8714069591527988, |
|
"grad_norm": 2.1191225051879883, |
|
"learning_rate": 4.930171361838052e-06, |
|
"loss": 0.697, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.875945537065053, |
|
"grad_norm": 1.4051584005355835, |
|
"learning_rate": 4.928652398904022e-06, |
|
"loss": 0.6985, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.8804841149773072, |
|
"grad_norm": 0.7633137106895447, |
|
"learning_rate": 4.92711733154567e-06, |
|
"loss": 0.7018, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.8850226928895613, |
|
"grad_norm": 1.5257729291915894, |
|
"learning_rate": 4.925566169942048e-06, |
|
"loss": 0.7108, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.8895612708018155, |
|
"grad_norm": 1.7885994911193848, |
|
"learning_rate": 4.9239989243789275e-06, |
|
"loss": 0.7251, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.8940998487140696, |
|
"grad_norm": 1.8133364915847778, |
|
"learning_rate": 4.922415605248734e-06, |
|
"loss": 0.691, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.8986384266263238, |
|
"grad_norm": 1.3306565284729004, |
|
"learning_rate": 4.920816223050475e-06, |
|
"loss": 0.6496, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.903177004538578, |
|
"grad_norm": 2.5053746700286865, |
|
"learning_rate": 4.919200788389675e-06, |
|
"loss": 0.7174, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.9077155824508321, |
|
"grad_norm": 2.8250479698181152, |
|
"learning_rate": 4.917569311978301e-06, |
|
"loss": 0.6992, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.9122541603630863, |
|
"grad_norm": 0.6993988752365112, |
|
"learning_rate": 4.915921804634693e-06, |
|
"loss": 0.6983, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.9167927382753404, |
|
"grad_norm": 2.591536045074463, |
|
"learning_rate": 4.914258277283494e-06, |
|
"loss": 0.6686, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.9213313161875946, |
|
"grad_norm": 1.7433080673217773, |
|
"learning_rate": 4.912578740955573e-06, |
|
"loss": 0.686, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.9258698940998488, |
|
"grad_norm": 1.5048431158065796, |
|
"learning_rate": 4.910883206787958e-06, |
|
"loss": 0.7043, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.9304084720121029, |
|
"grad_norm": 0.7160290479660034, |
|
"learning_rate": 4.9091716860237545e-06, |
|
"loss": 0.6703, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.9349470499243571, |
|
"grad_norm": 16.788084030151367, |
|
"learning_rate": 4.907444190012081e-06, |
|
"loss": 0.7465, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.9394856278366112, |
|
"grad_norm": 6.705326557159424, |
|
"learning_rate": 4.905700730207983e-06, |
|
"loss": 0.6692, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.9440242057488654, |
|
"grad_norm": 1.1539785861968994, |
|
"learning_rate": 4.903941318172365e-06, |
|
"loss": 0.6769, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.9485627836611196, |
|
"grad_norm": 0.806441068649292, |
|
"learning_rate": 4.902165965571911e-06, |
|
"loss": 0.6788, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.9531013615733737, |
|
"grad_norm": 2.599201202392578, |
|
"learning_rate": 4.900374684179005e-06, |
|
"loss": 0.6845, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.9576399394856279, |
|
"grad_norm": 1.9495007991790771, |
|
"learning_rate": 4.898567485871656e-06, |
|
"loss": 0.68, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.962178517397882, |
|
"grad_norm": 1.3471159934997559, |
|
"learning_rate": 4.896744382633419e-06, |
|
"loss": 0.6799, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.9667170953101362, |
|
"grad_norm": 2.436737537384033, |
|
"learning_rate": 4.894905386553316e-06, |
|
"loss": 0.6688, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.9712556732223904, |
|
"grad_norm": 2.486992597579956, |
|
"learning_rate": 4.893050509825749e-06, |
|
"loss": 0.6866, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.9757942511346445, |
|
"grad_norm": 2.137023687362671, |
|
"learning_rate": 4.891179764750434e-06, |
|
"loss": 0.6891, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.9803328290468987, |
|
"grad_norm": 1.3739172220230103, |
|
"learning_rate": 4.8892931637323e-06, |
|
"loss": 0.6219, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.9848714069591528, |
|
"grad_norm": 1.6083399057388306, |
|
"learning_rate": 4.887390719281423e-06, |
|
"loss": 0.6805, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.989409984871407, |
|
"grad_norm": 0.7767590284347534, |
|
"learning_rate": 4.885472444012937e-06, |
|
"loss": 0.6857, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.9939485627836612, |
|
"grad_norm": 0.8085631728172302, |
|
"learning_rate": 4.883538350646949e-06, |
|
"loss": 0.6594, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.9984871406959153, |
|
"grad_norm": 0.7762560844421387, |
|
"learning_rate": 4.881588452008457e-06, |
|
"loss": 0.6972, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.7762560844421387, |
|
"learning_rate": 4.8796227610272615e-06, |
|
"loss": 0.2152, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.0045385779122542, |
|
"grad_norm": 0.8768157958984375, |
|
"learning_rate": 4.8776412907378845e-06, |
|
"loss": 0.6847, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.0090771558245084, |
|
"grad_norm": 2.3720204830169678, |
|
"learning_rate": 4.8756440542794805e-06, |
|
"loss": 0.6646, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.0136157337367624, |
|
"grad_norm": 2.1314537525177, |
|
"learning_rate": 4.873631064895749e-06, |
|
"loss": 0.676, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.0181543116490166, |
|
"grad_norm": 1.7491836547851562, |
|
"learning_rate": 4.871602335934847e-06, |
|
"loss": 0.6474, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.0226928895612708, |
|
"grad_norm": 1.7630263566970825, |
|
"learning_rate": 4.8695578808493034e-06, |
|
"loss": 0.6541, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.027231467473525, |
|
"grad_norm": 1.195518970489502, |
|
"learning_rate": 4.867497713195925e-06, |
|
"loss": 0.6529, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.0317700453857792, |
|
"grad_norm": 5.6356377601623535, |
|
"learning_rate": 4.8654218466357066e-06, |
|
"loss": 0.6666, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.0363086232980332, |
|
"grad_norm": 21.327880859375, |
|
"learning_rate": 4.863330294933748e-06, |
|
"loss": 0.6721, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.0408472012102874, |
|
"grad_norm": 14.34103012084961, |
|
"learning_rate": 4.8612230719591535e-06, |
|
"loss": 0.6374, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.0453857791225416, |
|
"grad_norm": 7.112085819244385, |
|
"learning_rate": 4.859100191684946e-06, |
|
"loss": 0.6729, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.0499243570347958, |
|
"grad_norm": 2.3837637901306152, |
|
"learning_rate": 4.856961668187968e-06, |
|
"loss": 0.6741, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.05446293494705, |
|
"grad_norm": 1.5125519037246704, |
|
"learning_rate": 4.854807515648799e-06, |
|
"loss": 0.6584, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.059001512859304, |
|
"grad_norm": 1.8248745203018188, |
|
"learning_rate": 4.852637748351651e-06, |
|
"loss": 0.6481, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.0635400907715582, |
|
"grad_norm": 2.260824203491211, |
|
"learning_rate": 4.850452380684275e-06, |
|
"loss": 0.6695, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.0680786686838124, |
|
"grad_norm": 1.764312982559204, |
|
"learning_rate": 4.848251427137875e-06, |
|
"loss": 0.6638, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.0726172465960666, |
|
"grad_norm": 0.9826205968856812, |
|
"learning_rate": 4.846034902306997e-06, |
|
"loss": 0.6515, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.0771558245083208, |
|
"grad_norm": 1.1290264129638672, |
|
"learning_rate": 4.8438028208894496e-06, |
|
"loss": 0.6483, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.0816944024205748, |
|
"grad_norm": 4.97009801864624, |
|
"learning_rate": 4.841555197686189e-06, |
|
"loss": 0.6605, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.086232980332829, |
|
"grad_norm": 6.06207799911499, |
|
"learning_rate": 4.839292047601234e-06, |
|
"loss": 0.6147, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.0907715582450832, |
|
"grad_norm": 1.2093101739883423, |
|
"learning_rate": 4.837013385641562e-06, |
|
"loss": 0.6739, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.0953101361573374, |
|
"grad_norm": 0.9535529017448425, |
|
"learning_rate": 4.834719226917007e-06, |
|
"loss": 0.6392, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.0998487140695916, |
|
"grad_norm": 1.4470645189285278, |
|
"learning_rate": 4.832409586640164e-06, |
|
"loss": 0.6357, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.1043872919818456, |
|
"grad_norm": 1.4899321794509888, |
|
"learning_rate": 4.830084480126288e-06, |
|
"loss": 0.6704, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.1089258698940998, |
|
"grad_norm": 0.7240656614303589, |
|
"learning_rate": 4.827743922793189e-06, |
|
"loss": 0.621, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.113464447806354, |
|
"grad_norm": 0.8096688985824585, |
|
"learning_rate": 4.8253879301611315e-06, |
|
"loss": 0.6561, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.1180030257186082, |
|
"grad_norm": 1.7580249309539795, |
|
"learning_rate": 4.823016517852731e-06, |
|
"loss": 0.5893, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.1225416036308624, |
|
"grad_norm": 2.5382940769195557, |
|
"learning_rate": 4.820629701592853e-06, |
|
"loss": 0.6548, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.1270801815431164, |
|
"grad_norm": 1.0767178535461426, |
|
"learning_rate": 4.8182274972085065e-06, |
|
"loss": 0.6801, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.1316187594553706, |
|
"grad_norm": 0.7919514179229736, |
|
"learning_rate": 4.815809920628738e-06, |
|
"loss": 0.6314, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.1361573373676248, |
|
"grad_norm": 1.305253267288208, |
|
"learning_rate": 4.813376987884527e-06, |
|
"loss": 0.6347, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.140695915279879, |
|
"grad_norm": 1.4656856060028076, |
|
"learning_rate": 4.810928715108683e-06, |
|
"loss": 0.6253, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.1452344931921332, |
|
"grad_norm": 1.2813221216201782, |
|
"learning_rate": 4.808465118535732e-06, |
|
"loss": 0.6751, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.1497730711043872, |
|
"grad_norm": 3.507342576980591, |
|
"learning_rate": 4.805986214501813e-06, |
|
"loss": 0.6606, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.1543116490166414, |
|
"grad_norm": 4.23391056060791, |
|
"learning_rate": 4.803492019444571e-06, |
|
"loss": 0.6278, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.1588502269288956, |
|
"grad_norm": 2.3074967861175537, |
|
"learning_rate": 4.8009825499030426e-06, |
|
"loss": 0.6175, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.1633888048411498, |
|
"grad_norm": 1.3244863748550415, |
|
"learning_rate": 4.798457822517554e-06, |
|
"loss": 0.6392, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.167927382753404, |
|
"grad_norm": 1.0530226230621338, |
|
"learning_rate": 4.795917854029601e-06, |
|
"loss": 0.6305, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.172465960665658, |
|
"grad_norm": 2.187415599822998, |
|
"learning_rate": 4.79336266128175e-06, |
|
"loss": 0.6432, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.1770045385779122, |
|
"grad_norm": 1.4672960042953491, |
|
"learning_rate": 4.790792261217513e-06, |
|
"loss": 0.649, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.1815431164901664, |
|
"grad_norm": 1.1620965003967285, |
|
"learning_rate": 4.788206670881245e-06, |
|
"loss": 0.6507, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.1860816944024206, |
|
"grad_norm": 0.7209274768829346, |
|
"learning_rate": 4.785605907418029e-06, |
|
"loss": 0.6502, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.1906202723146748, |
|
"grad_norm": 3.6349446773529053, |
|
"learning_rate": 4.78298998807356e-06, |
|
"loss": 0.6251, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.1951588502269288, |
|
"grad_norm": 13.263801574707031, |
|
"learning_rate": 4.7803589301940306e-06, |
|
"loss": 0.6663, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.199697428139183, |
|
"grad_norm": 12.560731887817383, |
|
"learning_rate": 4.777712751226019e-06, |
|
"loss": 0.6709, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.2042360060514372, |
|
"grad_norm": 3.721285820007324, |
|
"learning_rate": 4.775051468716371e-06, |
|
"loss": 0.6555, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.2087745839636914, |
|
"grad_norm": 0.974590539932251, |
|
"learning_rate": 4.772375100312084e-06, |
|
"loss": 0.6308, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.2133131618759456, |
|
"grad_norm": 1.3410248756408691, |
|
"learning_rate": 4.769683663760191e-06, |
|
"loss": 0.6506, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.2178517397881996, |
|
"grad_norm": 1.4115511178970337, |
|
"learning_rate": 4.7669771769076395e-06, |
|
"loss": 0.6296, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.2223903177004538, |
|
"grad_norm": 1.355098843574524, |
|
"learning_rate": 4.764255657701179e-06, |
|
"loss": 0.6756, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.226928895612708, |
|
"grad_norm": 0.9310200810432434, |
|
"learning_rate": 4.761519124187237e-06, |
|
"loss": 0.6724, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.2314674735249622, |
|
"grad_norm": 1.0098122358322144, |
|
"learning_rate": 4.758767594511801e-06, |
|
"loss": 0.6595, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.2360060514372164, |
|
"grad_norm": 2.7444238662719727, |
|
"learning_rate": 4.7560010869202985e-06, |
|
"loss": 0.582, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.2405446293494704, |
|
"grad_norm": 2.2694830894470215, |
|
"learning_rate": 4.753219619757477e-06, |
|
"loss": 0.6411, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.2450832072617246, |
|
"grad_norm": 1.1762354373931885, |
|
"learning_rate": 4.750423211467278e-06, |
|
"loss": 0.6358, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.2496217851739788, |
|
"grad_norm": 0.86478191614151, |
|
"learning_rate": 4.7476118805927214e-06, |
|
"loss": 0.6234, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.254160363086233, |
|
"grad_norm": 1.143272876739502, |
|
"learning_rate": 4.7447856457757765e-06, |
|
"loss": 0.6627, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.258698940998487, |
|
"grad_norm": 1.7226762771606445, |
|
"learning_rate": 4.7419445257572414e-06, |
|
"loss": 0.6248, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.2632375189107412, |
|
"grad_norm": 1.428463101387024, |
|
"learning_rate": 4.739088539376618e-06, |
|
"loss": 0.6577, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.2677760968229954, |
|
"grad_norm": 0.9464501142501831, |
|
"learning_rate": 4.736217705571989e-06, |
|
"loss": 0.6464, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.2723146747352496, |
|
"grad_norm": 0.8889546394348145, |
|
"learning_rate": 4.733332043379889e-06, |
|
"loss": 0.6249, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.2768532526475038, |
|
"grad_norm": 0.7456269860267639, |
|
"learning_rate": 4.730431571935178e-06, |
|
"loss": 0.6242, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.281391830559758, |
|
"grad_norm": 9.802299499511719, |
|
"learning_rate": 4.72751631047092e-06, |
|
"loss": 0.6576, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.2859304084720122, |
|
"grad_norm": 15.863835334777832, |
|
"learning_rate": 4.72458627831825e-06, |
|
"loss": 0.6916, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.2904689863842662, |
|
"grad_norm": 15.025418281555176, |
|
"learning_rate": 4.721641494906247e-06, |
|
"loss": 0.7036, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.2950075642965204, |
|
"grad_norm": 3.8970537185668945, |
|
"learning_rate": 4.718681979761806e-06, |
|
"loss": 0.6166, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.2995461422087746, |
|
"grad_norm": 0.6507979035377502, |
|
"learning_rate": 4.715707752509512e-06, |
|
"loss": 0.613, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.3040847201210286, |
|
"grad_norm": 1.1878042221069336, |
|
"learning_rate": 4.712718832871499e-06, |
|
"loss": 0.6474, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.3086232980332828, |
|
"grad_norm": 1.0940614938735962, |
|
"learning_rate": 4.709715240667332e-06, |
|
"loss": 0.6577, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.313161875945537, |
|
"grad_norm": 0.9987061619758606, |
|
"learning_rate": 4.706696995813869e-06, |
|
"loss": 0.6571, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.3177004538577912, |
|
"grad_norm": 1.5589380264282227, |
|
"learning_rate": 4.7036641183251285e-06, |
|
"loss": 0.6495, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.3222390317700454, |
|
"grad_norm": 1.525474190711975, |
|
"learning_rate": 4.700616628312159e-06, |
|
"loss": 0.5986, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.3267776096822996, |
|
"grad_norm": 0.8548336625099182, |
|
"learning_rate": 4.697554545982904e-06, |
|
"loss": 0.6034, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.3313161875945538, |
|
"grad_norm": 4.231250286102295, |
|
"learning_rate": 4.6944778916420705e-06, |
|
"loss": 0.6405, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.3358547655068078, |
|
"grad_norm": 8.273162841796875, |
|
"learning_rate": 4.691386685690993e-06, |
|
"loss": 0.6635, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.340393343419062, |
|
"grad_norm": 4.974193096160889, |
|
"learning_rate": 4.6882809486274934e-06, |
|
"loss": 0.6289, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.3449319213313162, |
|
"grad_norm": 3.757338523864746, |
|
"learning_rate": 4.685160701045757e-06, |
|
"loss": 0.6227, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.3494704992435702, |
|
"grad_norm": 1.2015799283981323, |
|
"learning_rate": 4.68202596363618e-06, |
|
"loss": 0.6237, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.3540090771558244, |
|
"grad_norm": 0.7638722658157349, |
|
"learning_rate": 4.678876757185248e-06, |
|
"loss": 0.6063, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.3585476550680786, |
|
"grad_norm": 1.2864232063293457, |
|
"learning_rate": 4.675713102575389e-06, |
|
"loss": 0.5997, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.3630862329803328, |
|
"grad_norm": 1.1902930736541748, |
|
"learning_rate": 4.672535020784833e-06, |
|
"loss": 0.6352, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.367624810892587, |
|
"grad_norm": 1.4321516752243042, |
|
"learning_rate": 4.669342532887482e-06, |
|
"loss": 0.6531, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.3721633888048412, |
|
"grad_norm": 1.139543890953064, |
|
"learning_rate": 4.666135660052764e-06, |
|
"loss": 0.6235, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.3767019667170954, |
|
"grad_norm": 0.7234447598457336, |
|
"learning_rate": 4.66291442354549e-06, |
|
"loss": 0.6012, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.3812405446293494, |
|
"grad_norm": 3.093146324157715, |
|
"learning_rate": 4.659678844725722e-06, |
|
"loss": 0.6058, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.3857791225416036, |
|
"grad_norm": 2.840275764465332, |
|
"learning_rate": 4.656428945048622e-06, |
|
"loss": 0.6139, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.3903177004538578, |
|
"grad_norm": 1.0061054229736328, |
|
"learning_rate": 4.653164746064315e-06, |
|
"loss": 0.6288, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.394856278366112, |
|
"grad_norm": 0.9403374195098877, |
|
"learning_rate": 4.649886269417746e-06, |
|
"loss": 0.6435, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.399394856278366, |
|
"grad_norm": 1.0838265419006348, |
|
"learning_rate": 4.646593536848535e-06, |
|
"loss": 0.6485, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.4039334341906202, |
|
"grad_norm": 1.2738953828811646, |
|
"learning_rate": 4.643286570190832e-06, |
|
"loss": 0.5993, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.4084720121028744, |
|
"grad_norm": 1.3124756813049316, |
|
"learning_rate": 4.639965391373173e-06, |
|
"loss": 0.6154, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.4130105900151286, |
|
"grad_norm": 0.7026720643043518, |
|
"learning_rate": 4.636630022418337e-06, |
|
"loss": 0.6493, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.4175491679273828, |
|
"grad_norm": 1.101508617401123, |
|
"learning_rate": 4.6332804854431986e-06, |
|
"loss": 0.6437, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.422087745839637, |
|
"grad_norm": 0.6824156641960144, |
|
"learning_rate": 4.6299168026585775e-06, |
|
"loss": 0.6017, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.426626323751891, |
|
"grad_norm": 0.8083431124687195, |
|
"learning_rate": 4.626538996369096e-06, |
|
"loss": 0.6338, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.4311649016641452, |
|
"grad_norm": 0.9624136090278625, |
|
"learning_rate": 4.623147088973031e-06, |
|
"loss": 0.5804, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.4357034795763994, |
|
"grad_norm": 0.8000622987747192, |
|
"learning_rate": 4.619741102962161e-06, |
|
"loss": 0.6242, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.4402420574886536, |
|
"grad_norm": 1.2038214206695557, |
|
"learning_rate": 4.6163210609216234e-06, |
|
"loss": 0.6259, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.4447806354009076, |
|
"grad_norm": 0.8374214768409729, |
|
"learning_rate": 4.612886985529759e-06, |
|
"loss": 0.6078, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.4493192133131618, |
|
"grad_norm": 1.0167770385742188, |
|
"learning_rate": 4.609438899557964e-06, |
|
"loss": 0.5972, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.453857791225416, |
|
"grad_norm": 0.8266498446464539, |
|
"learning_rate": 4.60597682587054e-06, |
|
"loss": 0.6211, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.4583963691376702, |
|
"grad_norm": 0.7585692405700684, |
|
"learning_rate": 4.6025007874245405e-06, |
|
"loss": 0.6233, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.4629349470499244, |
|
"grad_norm": 1.6358634233474731, |
|
"learning_rate": 4.59901080726962e-06, |
|
"loss": 0.6075, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.4674735249621786, |
|
"grad_norm": 1.1722335815429688, |
|
"learning_rate": 4.595506908547881e-06, |
|
"loss": 0.6066, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.4720121028744326, |
|
"grad_norm": 0.9726622104644775, |
|
"learning_rate": 4.591989114493718e-06, |
|
"loss": 0.6506, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.4765506807866868, |
|
"grad_norm": 0.8073020577430725, |
|
"learning_rate": 4.588457448433667e-06, |
|
"loss": 0.6077, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.481089258698941, |
|
"grad_norm": 0.71394282579422, |
|
"learning_rate": 4.584911933786252e-06, |
|
"loss": 0.5882, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.4856278366111952, |
|
"grad_norm": 4.143211364746094, |
|
"learning_rate": 4.581352594061824e-06, |
|
"loss": 0.6047, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.4901664145234492, |
|
"grad_norm": 3.5801639556884766, |
|
"learning_rate": 4.5777794528624075e-06, |
|
"loss": 0.6094, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.4947049924357034, |
|
"grad_norm": 0.9617034792900085, |
|
"learning_rate": 4.574192533881547e-06, |
|
"loss": 0.6291, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.4992435703479576, |
|
"grad_norm": 0.8535535931587219, |
|
"learning_rate": 4.570591860904149e-06, |
|
"loss": 0.6587, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.5037821482602118, |
|
"grad_norm": 1.426477074623108, |
|
"learning_rate": 4.566977457806317e-06, |
|
"loss": 0.6347, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.508320726172466, |
|
"grad_norm": 1.6053332090377808, |
|
"learning_rate": 4.563349348555207e-06, |
|
"loss": 0.603, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.5128593040847202, |
|
"grad_norm": 1.3673542737960815, |
|
"learning_rate": 4.5597075572088545e-06, |
|
"loss": 0.6443, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.5173978819969742, |
|
"grad_norm": 1.0444583892822266, |
|
"learning_rate": 4.556052107916023e-06, |
|
"loss": 0.6033, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.5219364599092284, |
|
"grad_norm": 2.568854331970215, |
|
"learning_rate": 4.552383024916044e-06, |
|
"loss": 0.6364, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.5264750378214826, |
|
"grad_norm": 0.8063260316848755, |
|
"learning_rate": 4.54870033253865e-06, |
|
"loss": 0.6406, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.5310136157337366, |
|
"grad_norm": 0.8449574112892151, |
|
"learning_rate": 4.545004055203823e-06, |
|
"loss": 0.5977, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.5355521936459908, |
|
"grad_norm": 0.7573151588439941, |
|
"learning_rate": 4.541294217421622e-06, |
|
"loss": 0.6098, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.540090771558245, |
|
"grad_norm": 0.7103497982025146, |
|
"learning_rate": 4.537570843792028e-06, |
|
"loss": 0.6344, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.5446293494704992, |
|
"grad_norm": 0.7327162623405457, |
|
"learning_rate": 4.5338339590047795e-06, |
|
"loss": 0.6318, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.5491679273827534, |
|
"grad_norm": 0.7688593864440918, |
|
"learning_rate": 4.530083587839204e-06, |
|
"loss": 0.6089, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.5537065052950076, |
|
"grad_norm": 0.9933049082756042, |
|
"learning_rate": 4.52631975516406e-06, |
|
"loss": 0.6003, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.5582450832072618, |
|
"grad_norm": 0.7319652438163757, |
|
"learning_rate": 4.522542485937369e-06, |
|
"loss": 0.6054, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.5627836611195158, |
|
"grad_norm": 0.8350914120674133, |
|
"learning_rate": 4.518751805206251e-06, |
|
"loss": 0.606, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.56732223903177, |
|
"grad_norm": 1.7987092733383179, |
|
"learning_rate": 4.514947738106755e-06, |
|
"loss": 0.6637, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.5718608169440242, |
|
"grad_norm": 0.6338518261909485, |
|
"learning_rate": 4.5111303098637005e-06, |
|
"loss": 0.5778, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.5763993948562782, |
|
"grad_norm": 0.854932427406311, |
|
"learning_rate": 4.5072995457905e-06, |
|
"loss": 0.598, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.5809379727685324, |
|
"grad_norm": 1.217940330505371, |
|
"learning_rate": 4.503455471288998e-06, |
|
"loss": 0.6087, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.5854765506807866, |
|
"grad_norm": 1.329987645149231, |
|
"learning_rate": 4.499598111849299e-06, |
|
"loss": 0.6321, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.5900151285930408, |
|
"grad_norm": 1.9689991474151611, |
|
"learning_rate": 4.495727493049604e-06, |
|
"loss": 0.6361, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.594553706505295, |
|
"grad_norm": 1.3316866159439087, |
|
"learning_rate": 4.491843640556033e-06, |
|
"loss": 0.6097, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.5990922844175492, |
|
"grad_norm": 1.2030465602874756, |
|
"learning_rate": 4.4879465801224605e-06, |
|
"loss": 0.6302, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.6036308623298035, |
|
"grad_norm": 0.9129522442817688, |
|
"learning_rate": 4.484036337590343e-06, |
|
"loss": 0.6398, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.6081694402420574, |
|
"grad_norm": 1.6810179948806763, |
|
"learning_rate": 4.4801129388885475e-06, |
|
"loss": 0.6234, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.6127080181543116, |
|
"grad_norm": 3.6033570766448975, |
|
"learning_rate": 4.476176410033179e-06, |
|
"loss": 0.6145, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.6172465960665658, |
|
"grad_norm": 1.2315465211868286, |
|
"learning_rate": 4.472226777127412e-06, |
|
"loss": 0.6274, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.6217851739788198, |
|
"grad_norm": 0.7585744261741638, |
|
"learning_rate": 4.468264066361308e-06, |
|
"loss": 0.5897, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.626323751891074, |
|
"grad_norm": 0.945957362651825, |
|
"learning_rate": 4.464288304011652e-06, |
|
"loss": 0.6078, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.6308623298033282, |
|
"grad_norm": 1.0154330730438232, |
|
"learning_rate": 4.460299516441777e-06, |
|
"loss": 0.5899, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.6354009077155824, |
|
"grad_norm": 0.8923754096031189, |
|
"learning_rate": 4.456297730101379e-06, |
|
"loss": 0.6204, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.6399394856278366, |
|
"grad_norm": 0.9550593495368958, |
|
"learning_rate": 4.452282971526355e-06, |
|
"loss": 0.5699, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.6444780635400909, |
|
"grad_norm": 0.7987310886383057, |
|
"learning_rate": 4.448255267338619e-06, |
|
"loss": 0.6325, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.649016641452345, |
|
"grad_norm": 0.830464780330658, |
|
"learning_rate": 4.444214644245928e-06, |
|
"loss": 0.6367, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.653555219364599, |
|
"grad_norm": 1.604446530342102, |
|
"learning_rate": 4.440161129041704e-06, |
|
"loss": 0.5668, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.6580937972768532, |
|
"grad_norm": 0.6995673179626465, |
|
"learning_rate": 4.436094748604856e-06, |
|
"loss": 0.596, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.6626323751891074, |
|
"grad_norm": 0.6944538950920105, |
|
"learning_rate": 4.432015529899604e-06, |
|
"loss": 0.6082, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.6671709531013614, |
|
"grad_norm": 0.9797276854515076, |
|
"learning_rate": 4.427923499975298e-06, |
|
"loss": 0.6104, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.6717095310136156, |
|
"grad_norm": 0.7624075412750244, |
|
"learning_rate": 4.423818685966239e-06, |
|
"loss": 0.5721, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.6762481089258698, |
|
"grad_norm": 0.8912142515182495, |
|
"learning_rate": 4.4197011150915e-06, |
|
"loss": 0.6162, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.680786686838124, |
|
"grad_norm": 1.4948642253875732, |
|
"learning_rate": 4.415570814654746e-06, |
|
"loss": 0.6355, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.6853252647503782, |
|
"grad_norm": 3.306320905685425, |
|
"learning_rate": 4.4114278120440494e-06, |
|
"loss": 0.6077, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.6898638426626325, |
|
"grad_norm": 0.6849818229675293, |
|
"learning_rate": 4.407272134731711e-06, |
|
"loss": 0.621, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.6944024205748867, |
|
"grad_norm": 0.9957187175750732, |
|
"learning_rate": 4.403103810274082e-06, |
|
"loss": 0.6468, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.6989409984871406, |
|
"grad_norm": 0.926688551902771, |
|
"learning_rate": 4.398922866311371e-06, |
|
"loss": 0.6021, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.7034795763993948, |
|
"grad_norm": 0.8220088481903076, |
|
"learning_rate": 4.394729330567471e-06, |
|
"loss": 0.5753, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.708018154311649, |
|
"grad_norm": 0.8064286112785339, |
|
"learning_rate": 4.390523230849769e-06, |
|
"loss": 0.6275, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.712556732223903, |
|
"grad_norm": 0.7482770681381226, |
|
"learning_rate": 4.386304595048966e-06, |
|
"loss": 0.6103, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.7170953101361572, |
|
"grad_norm": 1.6559797525405884, |
|
"learning_rate": 4.382073451138887e-06, |
|
"loss": 0.6366, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.7216338880484114, |
|
"grad_norm": 0.6992952227592468, |
|
"learning_rate": 4.3778298271762995e-06, |
|
"loss": 0.6188, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.7261724659606656, |
|
"grad_norm": 0.6812805533409119, |
|
"learning_rate": 4.373573751300729e-06, |
|
"loss": 0.6103, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.7307110438729199, |
|
"grad_norm": 0.767241358757019, |
|
"learning_rate": 4.369305251734267e-06, |
|
"loss": 0.6089, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.735249621785174, |
|
"grad_norm": 1.5500905513763428, |
|
"learning_rate": 4.365024356781386e-06, |
|
"loss": 0.6087, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.7397881996974283, |
|
"grad_norm": 0.8380416631698608, |
|
"learning_rate": 4.360731094828755e-06, |
|
"loss": 0.6074, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.7443267776096822, |
|
"grad_norm": 0.915096640586853, |
|
"learning_rate": 4.356425494345047e-06, |
|
"loss": 0.5962, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.7488653555219364, |
|
"grad_norm": 0.9544028639793396, |
|
"learning_rate": 4.352107583880753e-06, |
|
"loss": 0.5766, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.7534039334341907, |
|
"grad_norm": 0.7770220041275024, |
|
"learning_rate": 4.347777392067991e-06, |
|
"loss": 0.5879, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.7579425113464446, |
|
"grad_norm": 3.470493793487549, |
|
"learning_rate": 4.343434947620316e-06, |
|
"loss": 0.6107, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.7624810892586988, |
|
"grad_norm": 1.7937536239624023, |
|
"learning_rate": 4.339080279332531e-06, |
|
"loss": 0.5892, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.767019667170953, |
|
"grad_norm": 1.262220859527588, |
|
"learning_rate": 4.334713416080498e-06, |
|
"loss": 0.6321, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.7715582450832073, |
|
"grad_norm": 0.6722662448883057, |
|
"learning_rate": 4.33033438682094e-06, |
|
"loss": 0.6366, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.7760968229954615, |
|
"grad_norm": 1.0483866930007935, |
|
"learning_rate": 4.3259432205912544e-06, |
|
"loss": 0.5867, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.7806354009077157, |
|
"grad_norm": 1.2742741107940674, |
|
"learning_rate": 4.32153994650932e-06, |
|
"loss": 0.6045, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.7851739788199699, |
|
"grad_norm": 1.2766985893249512, |
|
"learning_rate": 4.317124593773301e-06, |
|
"loss": 0.5952, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.789712556732224, |
|
"grad_norm": 0.8167585134506226, |
|
"learning_rate": 4.312697191661457e-06, |
|
"loss": 0.5621, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.794251134644478, |
|
"grad_norm": 0.7818560004234314, |
|
"learning_rate": 4.308257769531947e-06, |
|
"loss": 0.5897, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.7987897125567323, |
|
"grad_norm": 1.0983150005340576, |
|
"learning_rate": 4.303806356822635e-06, |
|
"loss": 0.6189, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.8033282904689862, |
|
"grad_norm": 3.0957119464874268, |
|
"learning_rate": 4.299342983050892e-06, |
|
"loss": 0.5743, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.8078668683812404, |
|
"grad_norm": 1.364321231842041, |
|
"learning_rate": 4.294867677813407e-06, |
|
"loss": 0.5722, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.8124054462934946, |
|
"grad_norm": 1.0932508707046509, |
|
"learning_rate": 4.290380470785984e-06, |
|
"loss": 0.6074, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.8169440242057489, |
|
"grad_norm": 0.8298100829124451, |
|
"learning_rate": 4.285881391723348e-06, |
|
"loss": 0.6143, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.821482602118003, |
|
"grad_norm": 0.7821558117866516, |
|
"learning_rate": 4.2813704704589504e-06, |
|
"loss": 0.6148, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.8260211800302573, |
|
"grad_norm": 1.2309906482696533, |
|
"learning_rate": 4.276847736904765e-06, |
|
"loss": 0.6039, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.8305597579425115, |
|
"grad_norm": 0.7675696015357971, |
|
"learning_rate": 4.272313221051094e-06, |
|
"loss": 0.5869, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.8350983358547657, |
|
"grad_norm": 0.6467660069465637, |
|
"learning_rate": 4.267766952966369e-06, |
|
"loss": 0.5999, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.8396369137670197, |
|
"grad_norm": 2.9403133392333984, |
|
"learning_rate": 4.263208962796951e-06, |
|
"loss": 0.5859, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.8441754916792739, |
|
"grad_norm": 1.017529845237732, |
|
"learning_rate": 4.2586392807669286e-06, |
|
"loss": 0.5771, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.8487140695915278, |
|
"grad_norm": 0.7939811944961548, |
|
"learning_rate": 4.25405793717792e-06, |
|
"loss": 0.5968, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.853252647503782, |
|
"grad_norm": 0.9015148878097534, |
|
"learning_rate": 4.2494649624088724e-06, |
|
"loss": 0.5791, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.8577912254160363, |
|
"grad_norm": 1.0004379749298096, |
|
"learning_rate": 4.2448603869158585e-06, |
|
"loss": 0.5969, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.8623298033282905, |
|
"grad_norm": 0.8573418855667114, |
|
"learning_rate": 4.2402442412318765e-06, |
|
"loss": 0.6308, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.8668683812405447, |
|
"grad_norm": 0.7616469860076904, |
|
"learning_rate": 4.235616555966646e-06, |
|
"loss": 0.5955, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.8714069591527989, |
|
"grad_norm": 0.838377833366394, |
|
"learning_rate": 4.2309773618064035e-06, |
|
"loss": 0.6135, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.875945537065053, |
|
"grad_norm": 0.6853629350662231, |
|
"learning_rate": 4.226326689513705e-06, |
|
"loss": 0.5962, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.8804841149773073, |
|
"grad_norm": 3.4511594772338867, |
|
"learning_rate": 4.221664569927217e-06, |
|
"loss": 0.632, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.8850226928895613, |
|
"grad_norm": 2.316239833831787, |
|
"learning_rate": 4.216991033961511e-06, |
|
"loss": 0.5712, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.8895612708018155, |
|
"grad_norm": 0.7679340243339539, |
|
"learning_rate": 4.212306112606863e-06, |
|
"loss": 0.5849, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.8940998487140694, |
|
"grad_norm": 0.8144194483757019, |
|
"learning_rate": 4.207609836929045e-06, |
|
"loss": 0.5586, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.8986384266263236, |
|
"grad_norm": 0.7225912809371948, |
|
"learning_rate": 4.2029022380691195e-06, |
|
"loss": 0.606, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.9031770045385779, |
|
"grad_norm": 0.800234854221344, |
|
"learning_rate": 4.198183347243233e-06, |
|
"loss": 0.6024, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.907715582450832, |
|
"grad_norm": 0.7729604840278625, |
|
"learning_rate": 4.1934531957424095e-06, |
|
"loss": 0.598, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.9122541603630863, |
|
"grad_norm": 0.6805166602134705, |
|
"learning_rate": 4.188711814932343e-06, |
|
"loss": 0.6148, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.9167927382753405, |
|
"grad_norm": 1.0123629570007324, |
|
"learning_rate": 4.1839592362531875e-06, |
|
"loss": 0.6029, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.9213313161875947, |
|
"grad_norm": 0.8777531385421753, |
|
"learning_rate": 4.179195491219353e-06, |
|
"loss": 0.5721, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.9258698940998489, |
|
"grad_norm": 0.8781999945640564, |
|
"learning_rate": 4.1744206114192895e-06, |
|
"loss": 0.5761, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.9304084720121029, |
|
"grad_norm": 0.909726083278656, |
|
"learning_rate": 4.169634628515288e-06, |
|
"loss": 0.6101, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.934947049924357, |
|
"grad_norm": 0.8270307779312134, |
|
"learning_rate": 4.164837574243259e-06, |
|
"loss": 0.5635, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.939485627836611, |
|
"grad_norm": 0.8078930974006653, |
|
"learning_rate": 4.16002948041253e-06, |
|
"loss": 0.6117, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.9440242057488653, |
|
"grad_norm": 1.2975406646728516, |
|
"learning_rate": 4.155210378905629e-06, |
|
"loss": 0.6157, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.9485627836611195, |
|
"grad_norm": 1.0585848093032837, |
|
"learning_rate": 4.15038030167808e-06, |
|
"loss": 0.6252, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.9531013615733737, |
|
"grad_norm": 1.106614112854004, |
|
"learning_rate": 4.145539280758184e-06, |
|
"loss": 0.5781, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.9576399394856279, |
|
"grad_norm": 0.7705745697021484, |
|
"learning_rate": 4.140687348246814e-06, |
|
"loss": 0.6134, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.962178517397882, |
|
"grad_norm": 1.1777352094650269, |
|
"learning_rate": 4.1358245363171905e-06, |
|
"loss": 0.6231, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.9667170953101363, |
|
"grad_norm": 0.9292231798171997, |
|
"learning_rate": 4.130950877214683e-06, |
|
"loss": 0.6086, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.9712556732223905, |
|
"grad_norm": 1.0536510944366455, |
|
"learning_rate": 4.126066403256585e-06, |
|
"loss": 0.6077, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.9757942511346445, |
|
"grad_norm": 0.7694706916809082, |
|
"learning_rate": 4.121171146831905e-06, |
|
"loss": 0.6318, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.9803328290468987, |
|
"grad_norm": 1.4091219902038574, |
|
"learning_rate": 4.116265140401148e-06, |
|
"loss": 0.5873, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.9848714069591527, |
|
"grad_norm": 1.4843878746032715, |
|
"learning_rate": 4.111348416496104e-06, |
|
"loss": 0.5748, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.9894099848714069, |
|
"grad_norm": 2.431475877761841, |
|
"learning_rate": 4.106421007719631e-06, |
|
"loss": 0.6155, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.993948562783661, |
|
"grad_norm": 0.689834475517273, |
|
"learning_rate": 4.101482946745438e-06, |
|
"loss": 0.5792, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.9984871406959153, |
|
"grad_norm": 0.7212426662445068, |
|
"learning_rate": 4.096534266317869e-06, |
|
"loss": 0.6106, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.7212426662445068, |
|
"learning_rate": 4.091574999251685e-06, |
|
"loss": 0.1986, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.004538577912254, |
|
"grad_norm": 0.859722912311554, |
|
"learning_rate": 4.086605178431848e-06, |
|
"loss": 0.5424, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.0090771558245084, |
|
"grad_norm": 0.782247006893158, |
|
"learning_rate": 4.0816248368133015e-06, |
|
"loss": 0.5169, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.0136157337367626, |
|
"grad_norm": 0.7215720415115356, |
|
"learning_rate": 4.076634007420754e-06, |
|
"loss": 0.5712, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.018154311649017, |
|
"grad_norm": 0.7926766872406006, |
|
"learning_rate": 4.0716327233484544e-06, |
|
"loss": 0.5433, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.022692889561271, |
|
"grad_norm": 0.7854951620101929, |
|
"learning_rate": 4.066621017759984e-06, |
|
"loss": 0.57, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.027231467473525, |
|
"grad_norm": 0.7837132215499878, |
|
"learning_rate": 4.0615989238880215e-06, |
|
"loss": 0.5367, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.031770045385779, |
|
"grad_norm": 1.0618802309036255, |
|
"learning_rate": 4.056566475034136e-06, |
|
"loss": 0.5247, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.036308623298033, |
|
"grad_norm": 0.7492559552192688, |
|
"learning_rate": 4.051523704568557e-06, |
|
"loss": 0.5398, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.0408472012102874, |
|
"grad_norm": 1.0183978080749512, |
|
"learning_rate": 4.04647064592996e-06, |
|
"loss": 0.5487, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.0453857791225416, |
|
"grad_norm": 0.8974794745445251, |
|
"learning_rate": 4.041407332625238e-06, |
|
"loss": 0.5551, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.049924357034796, |
|
"grad_norm": 0.6771326661109924, |
|
"learning_rate": 4.0363337982292865e-06, |
|
"loss": 0.5647, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.05446293494705, |
|
"grad_norm": 0.9089450240135193, |
|
"learning_rate": 4.031250076384774e-06, |
|
"loss": 0.5743, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.059001512859304, |
|
"grad_norm": 0.641944944858551, |
|
"learning_rate": 4.026156200801924e-06, |
|
"loss": 0.5344, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.0635400907715584, |
|
"grad_norm": 0.7394944429397583, |
|
"learning_rate": 4.021052205258288e-06, |
|
"loss": 0.5856, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.068078668683812, |
|
"grad_norm": 2.491907835006714, |
|
"learning_rate": 4.015938123598525e-06, |
|
"loss": 0.5597, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.0726172465960664, |
|
"grad_norm": 1.3713676929473877, |
|
"learning_rate": 4.010813989734174e-06, |
|
"loss": 0.5324, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.0771558245083206, |
|
"grad_norm": 0.7260848879814148, |
|
"learning_rate": 4.00567983764343e-06, |
|
"loss": 0.5529, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.081694402420575, |
|
"grad_norm": 1.168824553489685, |
|
"learning_rate": 4.0005357013709215e-06, |
|
"loss": 0.6031, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.086232980332829, |
|
"grad_norm": 0.9732924699783325, |
|
"learning_rate": 3.995381615027477e-06, |
|
"loss": 0.5643, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.090771558245083, |
|
"grad_norm": 1.2031774520874023, |
|
"learning_rate": 3.990217612789909e-06, |
|
"loss": 0.5651, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.0953101361573374, |
|
"grad_norm": 0.7182034850120544, |
|
"learning_rate": 3.985043728900782e-06, |
|
"loss": 0.5507, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.0998487140695916, |
|
"grad_norm": 2.1731138229370117, |
|
"learning_rate": 3.979859997668182e-06, |
|
"loss": 0.5581, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.104387291981846, |
|
"grad_norm": 1.6525670289993286, |
|
"learning_rate": 3.9746664534654975e-06, |
|
"loss": 0.5827, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.1089258698941, |
|
"grad_norm": 0.9597675800323486, |
|
"learning_rate": 3.969463130731183e-06, |
|
"loss": 0.5685, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.1134644478063542, |
|
"grad_norm": 0.7419756650924683, |
|
"learning_rate": 3.964250063968537e-06, |
|
"loss": 0.5759, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.118003025718608, |
|
"grad_norm": 0.76424241065979, |
|
"learning_rate": 3.959027287745471e-06, |
|
"loss": 0.5648, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.122541603630862, |
|
"grad_norm": 0.8645866513252258, |
|
"learning_rate": 3.95379483669428e-06, |
|
"loss": 0.5594, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.1270801815431164, |
|
"grad_norm": 1.0087001323699951, |
|
"learning_rate": 3.9485527455114095e-06, |
|
"loss": 0.5359, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.1316187594553706, |
|
"grad_norm": 1.055308222770691, |
|
"learning_rate": 3.943301048957233e-06, |
|
"loss": 0.5465, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.136157337367625, |
|
"grad_norm": 1.540602445602417, |
|
"learning_rate": 3.9380397818558154e-06, |
|
"loss": 0.5647, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.140695915279879, |
|
"grad_norm": 0.6511226892471313, |
|
"learning_rate": 3.932768979094685e-06, |
|
"loss": 0.5102, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.145234493192133, |
|
"grad_norm": 0.6932368278503418, |
|
"learning_rate": 3.927488675624599e-06, |
|
"loss": 0.5934, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.1497730711043874, |
|
"grad_norm": 0.6622692942619324, |
|
"learning_rate": 3.922198906459318e-06, |
|
"loss": 0.5783, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.1543116490166416, |
|
"grad_norm": 0.9521903991699219, |
|
"learning_rate": 3.916899706675366e-06, |
|
"loss": 0.5642, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.1588502269288954, |
|
"grad_norm": 1.9041461944580078, |
|
"learning_rate": 3.911591111411802e-06, |
|
"loss": 0.5631, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.1633888048411496, |
|
"grad_norm": 0.7302039861679077, |
|
"learning_rate": 3.906273155869988e-06, |
|
"loss": 0.5907, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.167927382753404, |
|
"grad_norm": 0.9459813237190247, |
|
"learning_rate": 3.900945875313353e-06, |
|
"loss": 0.5767, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.172465960665658, |
|
"grad_norm": 0.9118275046348572, |
|
"learning_rate": 3.895609305067162e-06, |
|
"loss": 0.5699, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.177004538577912, |
|
"grad_norm": 0.9915804266929626, |
|
"learning_rate": 3.890263480518278e-06, |
|
"loss": 0.5692, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.1815431164901664, |
|
"grad_norm": 0.7956082820892334, |
|
"learning_rate": 3.884908437114931e-06, |
|
"loss": 0.5809, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.1860816944024206, |
|
"grad_norm": 0.7874560952186584, |
|
"learning_rate": 3.879544210366479e-06, |
|
"loss": 0.5426, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.190620272314675, |
|
"grad_norm": 0.672660231590271, |
|
"learning_rate": 3.8741708358431776e-06, |
|
"loss": 0.5562, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.195158850226929, |
|
"grad_norm": 0.9052623510360718, |
|
"learning_rate": 3.868788349175939e-06, |
|
"loss": 0.5946, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.1996974281391832, |
|
"grad_norm": 2.023263692855835, |
|
"learning_rate": 3.863396786056102e-06, |
|
"loss": 0.5571, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.2042360060514374, |
|
"grad_norm": 0.7047929167747498, |
|
"learning_rate": 3.8579961822351856e-06, |
|
"loss": 0.5659, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.208774583963691, |
|
"grad_norm": 0.6957628130912781, |
|
"learning_rate": 3.852586573524663e-06, |
|
"loss": 0.5478, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.2133131618759454, |
|
"grad_norm": 1.1220340728759766, |
|
"learning_rate": 3.847167995795716e-06, |
|
"loss": 0.5478, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.2178517397881996, |
|
"grad_norm": 1.4658353328704834, |
|
"learning_rate": 3.841740484979002e-06, |
|
"loss": 0.5418, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.222390317700454, |
|
"grad_norm": 0.7847384810447693, |
|
"learning_rate": 3.836304077064412e-06, |
|
"loss": 0.5784, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.226928895612708, |
|
"grad_norm": 1.5285112857818604, |
|
"learning_rate": 3.830858808100835e-06, |
|
"loss": 0.5449, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.231467473524962, |
|
"grad_norm": 0.6902230978012085, |
|
"learning_rate": 3.825404714195917e-06, |
|
"loss": 0.567, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.2360060514372164, |
|
"grad_norm": 0.8884925842285156, |
|
"learning_rate": 3.819941831515825e-06, |
|
"loss": 0.5181, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.2405446293494706, |
|
"grad_norm": 1.5801842212677002, |
|
"learning_rate": 3.8144701962849973e-06, |
|
"loss": 0.5377, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.245083207261725, |
|
"grad_norm": 0.7058039307594299, |
|
"learning_rate": 3.80898984478592e-06, |
|
"loss": 0.5685, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.2496217851739786, |
|
"grad_norm": 0.6729607582092285, |
|
"learning_rate": 3.803500813358869e-06, |
|
"loss": 0.563, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.254160363086233, |
|
"grad_norm": 0.8975954055786133, |
|
"learning_rate": 3.7980031384016826e-06, |
|
"loss": 0.5865, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.258698940998487, |
|
"grad_norm": 1.0153331756591797, |
|
"learning_rate": 3.79249685636951e-06, |
|
"loss": 0.5611, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.263237518910741, |
|
"grad_norm": 0.6788516640663147, |
|
"learning_rate": 3.7869820037745773e-06, |
|
"loss": 0.5417, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.2677760968229954, |
|
"grad_norm": 0.9200128316879272, |
|
"learning_rate": 3.7814586171859397e-06, |
|
"loss": 0.5621, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.2723146747352496, |
|
"grad_norm": 2.8803627490997314, |
|
"learning_rate": 3.775926733229243e-06, |
|
"loss": 0.5593, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.276853252647504, |
|
"grad_norm": 2.3636279106140137, |
|
"learning_rate": 3.770386388586479e-06, |
|
"loss": 0.5893, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.281391830559758, |
|
"grad_norm": 0.8512002825737, |
|
"learning_rate": 3.7648376199957416e-06, |
|
"loss": 0.5411, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.2859304084720122, |
|
"grad_norm": 1.220920205116272, |
|
"learning_rate": 3.7592804642509844e-06, |
|
"loss": 0.5077, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.2904689863842664, |
|
"grad_norm": 1.1704233884811401, |
|
"learning_rate": 3.7537149582017764e-06, |
|
"loss": 0.5412, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.2950075642965206, |
|
"grad_norm": 1.7733300924301147, |
|
"learning_rate": 3.7481411387530577e-06, |
|
"loss": 0.5297, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.2995461422087744, |
|
"grad_norm": 0.6807821393013, |
|
"learning_rate": 3.742559042864895e-06, |
|
"loss": 0.5729, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.3040847201210286, |
|
"grad_norm": 1.166459083557129, |
|
"learning_rate": 3.7369687075522355e-06, |
|
"loss": 0.528, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.308623298033283, |
|
"grad_norm": 0.7100872993469238, |
|
"learning_rate": 3.7313701698846616e-06, |
|
"loss": 0.5686, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.313161875945537, |
|
"grad_norm": 0.9993472099304199, |
|
"learning_rate": 3.725763466986147e-06, |
|
"loss": 0.5676, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.317700453857791, |
|
"grad_norm": 0.889721155166626, |
|
"learning_rate": 3.7201486360348075e-06, |
|
"loss": 0.5261, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.3222390317700454, |
|
"grad_norm": 0.6611590385437012, |
|
"learning_rate": 3.714525714262659e-06, |
|
"loss": 0.5292, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.3267776096822996, |
|
"grad_norm": 0.8129754662513733, |
|
"learning_rate": 3.708894738955364e-06, |
|
"loss": 0.5556, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.331316187594554, |
|
"grad_norm": 0.797924816608429, |
|
"learning_rate": 3.703255747451991e-06, |
|
"loss": 0.5294, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.335854765506808, |
|
"grad_norm": 0.7335793972015381, |
|
"learning_rate": 3.697608777144762e-06, |
|
"loss": 0.5144, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.340393343419062, |
|
"grad_norm": 0.70816570520401, |
|
"learning_rate": 3.691953865478809e-06, |
|
"loss": 0.5649, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.344931921331316, |
|
"grad_norm": 0.916517436504364, |
|
"learning_rate": 3.6862910499519204e-06, |
|
"loss": 0.5721, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.34947049924357, |
|
"grad_norm": 0.9996108412742615, |
|
"learning_rate": 3.680620368114297e-06, |
|
"loss": 0.5427, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.3540090771558244, |
|
"grad_norm": 0.7871035933494568, |
|
"learning_rate": 3.6749418575683005e-06, |
|
"loss": 0.5549, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.3585476550680786, |
|
"grad_norm": 0.7144160270690918, |
|
"learning_rate": 3.6692555559682052e-06, |
|
"loss": 0.5779, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.363086232980333, |
|
"grad_norm": 0.6424504518508911, |
|
"learning_rate": 3.6635615010199484e-06, |
|
"loss": 0.5392, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.367624810892587, |
|
"grad_norm": 1.4431594610214233, |
|
"learning_rate": 3.6578597304808784e-06, |
|
"loss": 0.5398, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.3721633888048412, |
|
"grad_norm": 0.834149181842804, |
|
"learning_rate": 3.6521502821595067e-06, |
|
"loss": 0.5421, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.3767019667170954, |
|
"grad_norm": 0.802004873752594, |
|
"learning_rate": 3.6464331939152576e-06, |
|
"loss": 0.5086, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.3812405446293496, |
|
"grad_norm": 0.73563152551651, |
|
"learning_rate": 3.6407085036582134e-06, |
|
"loss": 0.5406, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.385779122541604, |
|
"grad_norm": 1.410982608795166, |
|
"learning_rate": 3.634976249348867e-06, |
|
"loss": 0.568, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.3903177004538576, |
|
"grad_norm": 0.8028589487075806, |
|
"learning_rate": 3.629236468997868e-06, |
|
"loss": 0.5085, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.394856278366112, |
|
"grad_norm": 0.8530763387680054, |
|
"learning_rate": 3.6234892006657716e-06, |
|
"loss": 0.5324, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.399394856278366, |
|
"grad_norm": 0.8291765451431274, |
|
"learning_rate": 3.6177344824627854e-06, |
|
"loss": 0.5593, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.40393343419062, |
|
"grad_norm": 0.7214178442955017, |
|
"learning_rate": 3.6119723525485173e-06, |
|
"loss": 0.5302, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.4084720121028744, |
|
"grad_norm": 0.9748385548591614, |
|
"learning_rate": 3.606202849131723e-06, |
|
"loss": 0.5362, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.4130105900151286, |
|
"grad_norm": 0.8343231678009033, |
|
"learning_rate": 3.600426010470051e-06, |
|
"loss": 0.5762, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.417549167927383, |
|
"grad_norm": 1.3052794933319092, |
|
"learning_rate": 3.594641874869792e-06, |
|
"loss": 0.5693, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.422087745839637, |
|
"grad_norm": 0.842367947101593, |
|
"learning_rate": 3.5888504806856194e-06, |
|
"loss": 0.5332, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.4266263237518912, |
|
"grad_norm": 0.7482140064239502, |
|
"learning_rate": 3.5830518663203412e-06, |
|
"loss": 0.5556, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.431164901664145, |
|
"grad_norm": 0.7388250827789307, |
|
"learning_rate": 3.5772460702246415e-06, |
|
"loss": 0.5375, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.435703479576399, |
|
"grad_norm": 0.78763747215271, |
|
"learning_rate": 3.5714331308968257e-06, |
|
"loss": 0.5476, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.4402420574886534, |
|
"grad_norm": 0.7993074059486389, |
|
"learning_rate": 3.5656130868825677e-06, |
|
"loss": 0.5168, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.4447806354009076, |
|
"grad_norm": 0.7094533443450928, |
|
"learning_rate": 3.5597859767746524e-06, |
|
"loss": 0.5483, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.449319213313162, |
|
"grad_norm": 0.6727566123008728, |
|
"learning_rate": 3.553951839212718e-06, |
|
"loss": 0.5623, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.453857791225416, |
|
"grad_norm": 0.8603148460388184, |
|
"learning_rate": 3.548110712883005e-06, |
|
"loss": 0.5737, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.4583963691376702, |
|
"grad_norm": 0.9948770999908447, |
|
"learning_rate": 3.5422626365180936e-06, |
|
"loss": 0.5717, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.4629349470499244, |
|
"grad_norm": 0.9731677174568176, |
|
"learning_rate": 3.5364076488966516e-06, |
|
"loss": 0.558, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.4674735249621786, |
|
"grad_norm": 1.0563665628433228, |
|
"learning_rate": 3.5305457888431747e-06, |
|
"loss": 0.5143, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.472012102874433, |
|
"grad_norm": 0.7944256663322449, |
|
"learning_rate": 3.5246770952277302e-06, |
|
"loss": 0.523, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.476550680786687, |
|
"grad_norm": 9.147004127502441, |
|
"learning_rate": 3.5188016069656986e-06, |
|
"loss": 0.5638, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.481089258698941, |
|
"grad_norm": 5.153329372406006, |
|
"learning_rate": 3.512919363017516e-06, |
|
"loss": 0.5689, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.485627836611195, |
|
"grad_norm": 3.699370861053467, |
|
"learning_rate": 3.5070304023884154e-06, |
|
"loss": 0.5571, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.4901664145234492, |
|
"grad_norm": 1.9909933805465698, |
|
"learning_rate": 3.501134764128167e-06, |
|
"loss": 0.5493, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.4947049924357034, |
|
"grad_norm": 0.8108904957771301, |
|
"learning_rate": 3.495232487330822e-06, |
|
"loss": 0.5545, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.4992435703479576, |
|
"grad_norm": 1.2555493116378784, |
|
"learning_rate": 3.489323611134452e-06, |
|
"loss": 0.5634, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.503782148260212, |
|
"grad_norm": 1.1967525482177734, |
|
"learning_rate": 3.4834081747208888e-06, |
|
"loss": 0.5767, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.508320726172466, |
|
"grad_norm": 1.1422444581985474, |
|
"learning_rate": 3.477486217315464e-06, |
|
"loss": 0.5774, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.5128593040847202, |
|
"grad_norm": 0.7501647472381592, |
|
"learning_rate": 3.4715577781867516e-06, |
|
"loss": 0.5301, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.517397881996974, |
|
"grad_norm": 0.6840022802352905, |
|
"learning_rate": 3.465622896646305e-06, |
|
"loss": 0.5548, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.521936459909228, |
|
"grad_norm": 0.7143796682357788, |
|
"learning_rate": 3.4596816120483985e-06, |
|
"loss": 0.4968, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.5264750378214824, |
|
"grad_norm": 0.7957927584648132, |
|
"learning_rate": 3.453733963789764e-06, |
|
"loss": 0.561, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.5310136157337366, |
|
"grad_norm": 2.5668840408325195, |
|
"learning_rate": 3.4477799913093303e-06, |
|
"loss": 0.5766, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.535552193645991, |
|
"grad_norm": 0.8086560368537903, |
|
"learning_rate": 3.441819734087963e-06, |
|
"loss": 0.5794, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.540090771558245, |
|
"grad_norm": 0.7480419874191284, |
|
"learning_rate": 3.4358532316482037e-06, |
|
"loss": 0.5783, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.5446293494704992, |
|
"grad_norm": 0.6926214098930359, |
|
"learning_rate": 3.4298805235540033e-06, |
|
"loss": 0.5488, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.5491679273827534, |
|
"grad_norm": 0.6664040088653564, |
|
"learning_rate": 3.4239016494104636e-06, |
|
"loss": 0.5399, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.5537065052950076, |
|
"grad_norm": 1.0281089544296265, |
|
"learning_rate": 3.417916648863574e-06, |
|
"loss": 0.5698, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.558245083207262, |
|
"grad_norm": 1.0963114500045776, |
|
"learning_rate": 3.411925561599947e-06, |
|
"loss": 0.5392, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.562783661119516, |
|
"grad_norm": 0.7198578715324402, |
|
"learning_rate": 3.405928427346557e-06, |
|
"loss": 0.5643, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.5673222390317703, |
|
"grad_norm": 0.8547159433364868, |
|
"learning_rate": 3.3999252858704775e-06, |
|
"loss": 0.5077, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.5718608169440245, |
|
"grad_norm": 0.7263000011444092, |
|
"learning_rate": 3.3939161769786124e-06, |
|
"loss": 0.496, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.5763993948562782, |
|
"grad_norm": 0.6732318997383118, |
|
"learning_rate": 3.387901140517438e-06, |
|
"loss": 0.5575, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.5809379727685324, |
|
"grad_norm": 1.9751555919647217, |
|
"learning_rate": 3.3818802163727377e-06, |
|
"loss": 0.5257, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.5854765506807866, |
|
"grad_norm": 0.7534604668617249, |
|
"learning_rate": 3.3758534444693323e-06, |
|
"loss": 0.5367, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.590015128593041, |
|
"grad_norm": 0.6703462600708008, |
|
"learning_rate": 3.3698208647708226e-06, |
|
"loss": 0.5362, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.594553706505295, |
|
"grad_norm": 1.8879098892211914, |
|
"learning_rate": 3.36378251727932e-06, |
|
"loss": 0.5571, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.5990922844175492, |
|
"grad_norm": 0.6916099786758423, |
|
"learning_rate": 3.357738442035181e-06, |
|
"loss": 0.5411, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.6036308623298035, |
|
"grad_norm": 0.7827025651931763, |
|
"learning_rate": 3.3516886791167446e-06, |
|
"loss": 0.5316, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.608169440242057, |
|
"grad_norm": 1.1546365022659302, |
|
"learning_rate": 3.345633268640064e-06, |
|
"loss": 0.5164, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.6127080181543114, |
|
"grad_norm": 1.0985122919082642, |
|
"learning_rate": 3.3395722507586413e-06, |
|
"loss": 0.592, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.6172465960665656, |
|
"grad_norm": 0.7895015478134155, |
|
"learning_rate": 3.333505665663162e-06, |
|
"loss": 0.5393, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.62178517397882, |
|
"grad_norm": 0.8720937967300415, |
|
"learning_rate": 3.327433553581227e-06, |
|
"loss": 0.5474, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.626323751891074, |
|
"grad_norm": 0.7074962854385376, |
|
"learning_rate": 3.3213559547770873e-06, |
|
"loss": 0.562, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.6308623298033282, |
|
"grad_norm": 1.0457886457443237, |
|
"learning_rate": 3.3152729095513762e-06, |
|
"loss": 0.5188, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.6354009077155824, |
|
"grad_norm": 1.1377204656600952, |
|
"learning_rate": 3.309184458240843e-06, |
|
"loss": 0.548, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.6399394856278366, |
|
"grad_norm": 1.2311692237854004, |
|
"learning_rate": 3.303090641218083e-06, |
|
"loss": 0.5613, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.644478063540091, |
|
"grad_norm": 0.7034773826599121, |
|
"learning_rate": 3.2969914988912746e-06, |
|
"loss": 0.5469, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.649016641452345, |
|
"grad_norm": 0.9577970504760742, |
|
"learning_rate": 3.290887071703905e-06, |
|
"loss": 0.5377, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.6535552193645993, |
|
"grad_norm": 0.9517496228218079, |
|
"learning_rate": 3.284777400134507e-06, |
|
"loss": 0.5616, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.6580937972768535, |
|
"grad_norm": 0.8925998210906982, |
|
"learning_rate": 3.2786625246963903e-06, |
|
"loss": 0.539, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.6626323751891077, |
|
"grad_norm": 0.7537391185760498, |
|
"learning_rate": 3.272542485937369e-06, |
|
"loss": 0.5388, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.6671709531013614, |
|
"grad_norm": 0.8499334454536438, |
|
"learning_rate": 3.2664173244394965e-06, |
|
"loss": 0.5409, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.6717095310136156, |
|
"grad_norm": 1.3763985633850098, |
|
"learning_rate": 3.2602870808187955e-06, |
|
"loss": 0.5425, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.67624810892587, |
|
"grad_norm": 0.7515591382980347, |
|
"learning_rate": 3.2541517957249868e-06, |
|
"loss": 0.5623, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.680786686838124, |
|
"grad_norm": 0.7507291436195374, |
|
"learning_rate": 3.2480115098412234e-06, |
|
"loss": 0.5335, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.6853252647503782, |
|
"grad_norm": 0.7022225856781006, |
|
"learning_rate": 3.2418662638838166e-06, |
|
"loss": 0.5536, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.6898638426626325, |
|
"grad_norm": 0.7832311987876892, |
|
"learning_rate": 3.2357160986019697e-06, |
|
"loss": 0.5606, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.6944024205748867, |
|
"grad_norm": 1.096700668334961, |
|
"learning_rate": 3.2295610547775054e-06, |
|
"loss": 0.5602, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.6989409984871404, |
|
"grad_norm": 0.837489902973175, |
|
"learning_rate": 3.2234011732245953e-06, |
|
"loss": 0.5241, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.7034795763993946, |
|
"grad_norm": 0.7516577839851379, |
|
"learning_rate": 3.2172364947894914e-06, |
|
"loss": 0.5557, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.708018154311649, |
|
"grad_norm": 1.0745545625686646, |
|
"learning_rate": 3.211067060350253e-06, |
|
"loss": 0.5634, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.712556732223903, |
|
"grad_norm": 1.0247219800949097, |
|
"learning_rate": 3.204892910816476e-06, |
|
"loss": 0.5323, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.7170953101361572, |
|
"grad_norm": 0.7615066766738892, |
|
"learning_rate": 3.198714087129024e-06, |
|
"loss": 0.5474, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.7216338880484114, |
|
"grad_norm": 0.8073952198028564, |
|
"learning_rate": 3.1925306302597535e-06, |
|
"loss": 0.5344, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.7261724659606656, |
|
"grad_norm": 1.2883327007293701, |
|
"learning_rate": 3.1863425812112437e-06, |
|
"loss": 0.5876, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.73071104387292, |
|
"grad_norm": 2.0846781730651855, |
|
"learning_rate": 3.1801499810165254e-06, |
|
"loss": 0.5452, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.735249621785174, |
|
"grad_norm": 0.8865274786949158, |
|
"learning_rate": 3.1739528707388066e-06, |
|
"loss": 0.5386, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.7397881996974283, |
|
"grad_norm": 1.5364603996276855, |
|
"learning_rate": 3.1677512914712044e-06, |
|
"loss": 0.5549, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.7443267776096825, |
|
"grad_norm": 0.7343050837516785, |
|
"learning_rate": 3.1615452843364674e-06, |
|
"loss": 0.5359, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.7488653555219367, |
|
"grad_norm": 0.9987273812294006, |
|
"learning_rate": 3.155334890486707e-06, |
|
"loss": 0.5584, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.753403933434191, |
|
"grad_norm": 1.2430927753448486, |
|
"learning_rate": 3.149120151103121e-06, |
|
"loss": 0.5835, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.7579425113464446, |
|
"grad_norm": 0.901343047618866, |
|
"learning_rate": 3.142901107395724e-06, |
|
"loss": 0.5629, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.762481089258699, |
|
"grad_norm": 1.0777499675750732, |
|
"learning_rate": 3.1366778006030717e-06, |
|
"loss": 0.5638, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.767019667170953, |
|
"grad_norm": 0.6995704174041748, |
|
"learning_rate": 3.130450271991991e-06, |
|
"loss": 0.5628, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.7715582450832073, |
|
"grad_norm": 0.8021388053894043, |
|
"learning_rate": 3.1242185628573e-06, |
|
"loss": 0.5471, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.7760968229954615, |
|
"grad_norm": 1.0073556900024414, |
|
"learning_rate": 3.117982714521541e-06, |
|
"loss": 0.5375, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.7806354009077157, |
|
"grad_norm": 1.3955570459365845, |
|
"learning_rate": 3.1117427683347003e-06, |
|
"loss": 0.5481, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.78517397881997, |
|
"grad_norm": 0.7922578454017639, |
|
"learning_rate": 3.1054987656739395e-06, |
|
"loss": 0.5338, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.789712556732224, |
|
"grad_norm": 0.6501929759979248, |
|
"learning_rate": 3.0992507479433193e-06, |
|
"loss": 0.5076, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.794251134644478, |
|
"grad_norm": 1.006299614906311, |
|
"learning_rate": 3.0929987565735214e-06, |
|
"loss": 0.555, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.798789712556732, |
|
"grad_norm": 0.7578917145729065, |
|
"learning_rate": 3.0867428330215793e-06, |
|
"loss": 0.5359, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.8033282904689862, |
|
"grad_norm": 0.828425943851471, |
|
"learning_rate": 3.0804830187706005e-06, |
|
"loss": 0.5308, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.8078668683812404, |
|
"grad_norm": 0.7022498846054077, |
|
"learning_rate": 3.0742193553294896e-06, |
|
"loss": 0.5861, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.8124054462934946, |
|
"grad_norm": 0.6671915054321289, |
|
"learning_rate": 3.067951884232678e-06, |
|
"loss": 0.5764, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.816944024205749, |
|
"grad_norm": 0.9621557593345642, |
|
"learning_rate": 3.0616806470398453e-06, |
|
"loss": 0.5567, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.821482602118003, |
|
"grad_norm": 0.7543350458145142, |
|
"learning_rate": 3.055405685335643e-06, |
|
"loss": 0.5249, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.8260211800302573, |
|
"grad_norm": 0.8421291708946228, |
|
"learning_rate": 3.0491270407294195e-06, |
|
"loss": 0.5657, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.8305597579425115, |
|
"grad_norm": 0.7241919636726379, |
|
"learning_rate": 3.0428447548549466e-06, |
|
"loss": 0.5679, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.8350983358547657, |
|
"grad_norm": 0.7716313004493713, |
|
"learning_rate": 3.03655886937014e-06, |
|
"loss": 0.562, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.83963691376702, |
|
"grad_norm": 2.4703383445739746, |
|
"learning_rate": 3.030269425956784e-06, |
|
"loss": 0.5356, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.844175491679274, |
|
"grad_norm": 1.180924415588379, |
|
"learning_rate": 3.0239764663202565e-06, |
|
"loss": 0.5619, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.848714069591528, |
|
"grad_norm": 0.7013900876045227, |
|
"learning_rate": 3.017680032189252e-06, |
|
"loss": 0.5415, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.853252647503782, |
|
"grad_norm": 1.6009024381637573, |
|
"learning_rate": 3.011380165315503e-06, |
|
"loss": 0.5618, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.8577912254160363, |
|
"grad_norm": 1.0912448167800903, |
|
"learning_rate": 3.005076907473505e-06, |
|
"loss": 0.5821, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.8623298033282905, |
|
"grad_norm": 0.8813812732696533, |
|
"learning_rate": 2.9987703004602394e-06, |
|
"loss": 0.5405, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.8668683812405447, |
|
"grad_norm": 0.8485713601112366, |
|
"learning_rate": 2.9924603860948963e-06, |
|
"loss": 0.5189, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.871406959152799, |
|
"grad_norm": 0.6998408436775208, |
|
"learning_rate": 2.986147206218597e-06, |
|
"loss": 0.5483, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.875945537065053, |
|
"grad_norm": 0.7725822925567627, |
|
"learning_rate": 2.9798308026941147e-06, |
|
"loss": 0.5553, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.8804841149773073, |
|
"grad_norm": 1.0571022033691406, |
|
"learning_rate": 2.973511217405601e-06, |
|
"loss": 0.5628, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.885022692889561, |
|
"grad_norm": 2.2442429065704346, |
|
"learning_rate": 2.967188492258304e-06, |
|
"loss": 0.567, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.8895612708018152, |
|
"grad_norm": 1.3726474046707153, |
|
"learning_rate": 2.9608626691782927e-06, |
|
"loss": 0.546, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.8940998487140694, |
|
"grad_norm": 0.6917457580566406, |
|
"learning_rate": 2.9545337901121796e-06, |
|
"loss": 0.5322, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 2.8986384266263236, |
|
"grad_norm": 0.8719446659088135, |
|
"learning_rate": 2.9482018970268395e-06, |
|
"loss": 0.4992, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.903177004538578, |
|
"grad_norm": 0.8265273571014404, |
|
"learning_rate": 2.941867031909136e-06, |
|
"loss": 0.5641, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 2.907715582450832, |
|
"grad_norm": 0.9295107126235962, |
|
"learning_rate": 2.9355292367656363e-06, |
|
"loss": 0.5269, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.9122541603630863, |
|
"grad_norm": 1.0216796398162842, |
|
"learning_rate": 2.9291885536223415e-06, |
|
"loss": 0.543, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 2.9167927382753405, |
|
"grad_norm": 0.7285480499267578, |
|
"learning_rate": 2.9228450245243994e-06, |
|
"loss": 0.5021, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.9213313161875947, |
|
"grad_norm": 1.5654610395431519, |
|
"learning_rate": 2.91649869153583e-06, |
|
"loss": 0.5414, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 2.925869894099849, |
|
"grad_norm": 0.9667910933494568, |
|
"learning_rate": 2.910149596739248e-06, |
|
"loss": 0.5309, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.930408472012103, |
|
"grad_norm": 0.7517403364181519, |
|
"learning_rate": 2.9037977822355783e-06, |
|
"loss": 0.5385, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 2.9349470499243573, |
|
"grad_norm": 0.9054082036018372, |
|
"learning_rate": 2.8974432901437827e-06, |
|
"loss": 0.5404, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.939485627836611, |
|
"grad_norm": 0.8233144879341125, |
|
"learning_rate": 2.8910861626005774e-06, |
|
"loss": 0.5533, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 2.9440242057488653, |
|
"grad_norm": 0.7188256978988647, |
|
"learning_rate": 2.884726441760155e-06, |
|
"loss": 0.5448, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.9485627836611195, |
|
"grad_norm": 0.8374635577201843, |
|
"learning_rate": 2.878364169793903e-06, |
|
"loss": 0.5596, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 2.9531013615733737, |
|
"grad_norm": 1.4161272048950195, |
|
"learning_rate": 2.871999388890126e-06, |
|
"loss": 0.5863, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.957639939485628, |
|
"grad_norm": 1.0405840873718262, |
|
"learning_rate": 2.8656321412537653e-06, |
|
"loss": 0.5196, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 2.962178517397882, |
|
"grad_norm": 0.9523102641105652, |
|
"learning_rate": 2.85926246910612e-06, |
|
"loss": 0.5287, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.9667170953101363, |
|
"grad_norm": 1.1815980672836304, |
|
"learning_rate": 2.8528904146845652e-06, |
|
"loss": 0.5453, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 2.9712556732223905, |
|
"grad_norm": 2.321892499923706, |
|
"learning_rate": 2.8465160202422737e-06, |
|
"loss": 0.5703, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.9757942511346442, |
|
"grad_norm": 0.9072638750076294, |
|
"learning_rate": 2.840139328047934e-06, |
|
"loss": 0.5634, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 2.9803328290468984, |
|
"grad_norm": 0.8406242728233337, |
|
"learning_rate": 2.8337603803854713e-06, |
|
"loss": 0.5464, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.9848714069591527, |
|
"grad_norm": 0.9542201161384583, |
|
"learning_rate": 2.8273792195537663e-06, |
|
"loss": 0.5129, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 2.989409984871407, |
|
"grad_norm": 0.9052215814590454, |
|
"learning_rate": 2.820995887866378e-06, |
|
"loss": 0.5462, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.993948562783661, |
|
"grad_norm": 1.0007253885269165, |
|
"learning_rate": 2.8146104276512565e-06, |
|
"loss": 0.555, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 2.9984871406959153, |
|
"grad_norm": 0.9045431613922119, |
|
"learning_rate": 2.8082228812504693e-06, |
|
"loss": 0.5542, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.7561860084533691, |
|
"learning_rate": 2.801833291019915e-06, |
|
"loss": 0.1716, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 3.004538577912254, |
|
"grad_norm": 1.0536010265350342, |
|
"learning_rate": 2.7954416993290474e-06, |
|
"loss": 0.5426, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 3.0090771558245084, |
|
"grad_norm": 1.148224949836731, |
|
"learning_rate": 2.7890481485605898e-06, |
|
"loss": 0.531, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.0136157337367626, |
|
"grad_norm": 1.2489687204360962, |
|
"learning_rate": 2.7826526811102577e-06, |
|
"loss": 0.5283, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 3.018154311649017, |
|
"grad_norm": 0.7956582903862, |
|
"learning_rate": 2.7762553393864743e-06, |
|
"loss": 0.5213, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 3.022692889561271, |
|
"grad_norm": 1.8829700946807861, |
|
"learning_rate": 2.769856165810093e-06, |
|
"loss": 0.5051, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 3.027231467473525, |
|
"grad_norm": 0.6515493392944336, |
|
"learning_rate": 2.7634552028141137e-06, |
|
"loss": 0.5136, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 3.031770045385779, |
|
"grad_norm": 0.8646936416625977, |
|
"learning_rate": 2.757052492843401e-06, |
|
"loss": 0.5044, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.036308623298033, |
|
"grad_norm": 0.7098562717437744, |
|
"learning_rate": 2.750648078354406e-06, |
|
"loss": 0.5072, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 3.0408472012102874, |
|
"grad_norm": 0.7068854570388794, |
|
"learning_rate": 2.7442420018148797e-06, |
|
"loss": 0.4857, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 3.0453857791225416, |
|
"grad_norm": 0.8640517592430115, |
|
"learning_rate": 2.7378343057035956e-06, |
|
"loss": 0.5595, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 3.049924357034796, |
|
"grad_norm": 0.6892681121826172, |
|
"learning_rate": 2.7314250325100667e-06, |
|
"loss": 0.5237, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 3.05446293494705, |
|
"grad_norm": 0.6955535411834717, |
|
"learning_rate": 2.7250142247342637e-06, |
|
"loss": 0.5219, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.059001512859304, |
|
"grad_norm": 0.8527218103408813, |
|
"learning_rate": 2.718601924886332e-06, |
|
"loss": 0.5392, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 3.0635400907715584, |
|
"grad_norm": 0.7058966159820557, |
|
"learning_rate": 2.7121881754863126e-06, |
|
"loss": 0.5035, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 3.068078668683812, |
|
"grad_norm": 1.1376885175704956, |
|
"learning_rate": 2.7057730190638575e-06, |
|
"loss": 0.4946, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 3.0726172465960664, |
|
"grad_norm": 0.8086661100387573, |
|
"learning_rate": 2.699356498157949e-06, |
|
"loss": 0.4892, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 3.0771558245083206, |
|
"grad_norm": 0.6907851696014404, |
|
"learning_rate": 2.6929386553166165e-06, |
|
"loss": 0.5366, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.081694402420575, |
|
"grad_norm": 0.6702793836593628, |
|
"learning_rate": 2.686519533096656e-06, |
|
"loss": 0.497, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 3.086232980332829, |
|
"grad_norm": 1.3654205799102783, |
|
"learning_rate": 2.680099174063348e-06, |
|
"loss": 0.5224, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 3.090771558245083, |
|
"grad_norm": 0.7771999835968018, |
|
"learning_rate": 2.673677620790172e-06, |
|
"loss": 0.5089, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 3.0953101361573374, |
|
"grad_norm": 0.8415461182594299, |
|
"learning_rate": 2.667254915858529e-06, |
|
"loss": 0.5286, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 3.0998487140695916, |
|
"grad_norm": 0.8293631076812744, |
|
"learning_rate": 2.6608311018574545e-06, |
|
"loss": 0.4751, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.104387291981846, |
|
"grad_norm": 1.1603025197982788, |
|
"learning_rate": 2.6544062213833395e-06, |
|
"loss": 0.486, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 3.1089258698941, |
|
"grad_norm": 0.7829399704933167, |
|
"learning_rate": 2.647980317039646e-06, |
|
"loss": 0.5243, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 3.1134644478063542, |
|
"grad_norm": 0.6867077946662903, |
|
"learning_rate": 2.6415534314366264e-06, |
|
"loss": 0.519, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 3.118003025718608, |
|
"grad_norm": 0.8667089939117432, |
|
"learning_rate": 2.635125607191039e-06, |
|
"loss": 0.4992, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 3.122541603630862, |
|
"grad_norm": 0.8637856841087341, |
|
"learning_rate": 2.6286968869258666e-06, |
|
"loss": 0.5129, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.1270801815431164, |
|
"grad_norm": 0.6830906867980957, |
|
"learning_rate": 2.6222673132700335e-06, |
|
"loss": 0.524, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 3.1316187594553706, |
|
"grad_norm": 0.8948183059692383, |
|
"learning_rate": 2.615836928858122e-06, |
|
"loss": 0.5153, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 3.136157337367625, |
|
"grad_norm": 0.7270144820213318, |
|
"learning_rate": 2.609405776330092e-06, |
|
"loss": 0.5076, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 3.140695915279879, |
|
"grad_norm": 0.6997873783111572, |
|
"learning_rate": 2.6029738983309954e-06, |
|
"loss": 0.5008, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 3.145234493192133, |
|
"grad_norm": 0.7153282165527344, |
|
"learning_rate": 2.5965413375106965e-06, |
|
"loss": 0.5356, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.1497730711043874, |
|
"grad_norm": 0.8726050853729248, |
|
"learning_rate": 2.5901081365235852e-06, |
|
"loss": 0.5031, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 3.1543116490166416, |
|
"grad_norm": 0.9993102550506592, |
|
"learning_rate": 2.583674338028298e-06, |
|
"loss": 0.5487, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 3.1588502269288954, |
|
"grad_norm": 1.1097780466079712, |
|
"learning_rate": 2.5772399846874323e-06, |
|
"loss": 0.5031, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 3.1633888048411496, |
|
"grad_norm": 0.876848578453064, |
|
"learning_rate": 2.5708051191672658e-06, |
|
"loss": 0.5504, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 3.167927382753404, |
|
"grad_norm": 0.7725114226341248, |
|
"learning_rate": 2.5643697841374722e-06, |
|
"loss": 0.48, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.172465960665658, |
|
"grad_norm": 0.9931405186653137, |
|
"learning_rate": 2.557934022270837e-06, |
|
"loss": 0.5078, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 3.177004538577912, |
|
"grad_norm": 0.8958150744438171, |
|
"learning_rate": 2.551497876242978e-06, |
|
"loss": 0.5278, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 3.1815431164901664, |
|
"grad_norm": 1.0840779542922974, |
|
"learning_rate": 2.5450613887320606e-06, |
|
"loss": 0.5126, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 3.1860816944024206, |
|
"grad_norm": 0.6236334443092346, |
|
"learning_rate": 2.538624602418513e-06, |
|
"loss": 0.4677, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 3.190620272314675, |
|
"grad_norm": 0.731157660484314, |
|
"learning_rate": 2.5321875599847456e-06, |
|
"loss": 0.5218, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.195158850226929, |
|
"grad_norm": 0.8157845139503479, |
|
"learning_rate": 2.525750304114867e-06, |
|
"loss": 0.5423, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 3.1996974281391832, |
|
"grad_norm": 1.5604270696640015, |
|
"learning_rate": 2.519312877494401e-06, |
|
"loss": 0.4742, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 3.2042360060514374, |
|
"grad_norm": 0.7842020988464355, |
|
"learning_rate": 2.512875322810002e-06, |
|
"loss": 0.5113, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 3.208774583963691, |
|
"grad_norm": 0.8386942744255066, |
|
"learning_rate": 2.5064376827491786e-06, |
|
"loss": 0.4777, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 3.2133131618759454, |
|
"grad_norm": 2.208853244781494, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.5465, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.2178517397881996, |
|
"grad_norm": 0.8791889548301697, |
|
"learning_rate": 2.4935623172508223e-06, |
|
"loss": 0.5285, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 3.222390317700454, |
|
"grad_norm": 0.7482249140739441, |
|
"learning_rate": 2.4871246771899983e-06, |
|
"loss": 0.5327, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 3.226928895612708, |
|
"grad_norm": 0.6846868395805359, |
|
"learning_rate": 2.4806871225056006e-06, |
|
"loss": 0.5253, |
|
"step": 713 |
|
}, |
|
    {
      "epoch": 3.231467473524962,
      "grad_norm": 0.7808830738067627,
      "learning_rate": 2.474249695885134e-06,
      "loss": 0.5438,
      "step": 714
    },
    {
      "epoch": 3.2360060514372164,
      "grad_norm": 0.7752459645271301,
      "learning_rate": 2.467812440015255e-06,
      "loss": 0.5232,
      "step": 715
    },
    {
      "epoch": 3.2405446293494706,
      "grad_norm": 0.7170999646186829,
      "learning_rate": 2.461375397581487e-06,
      "loss": 0.4919,
      "step": 716
    },
    {
      "epoch": 3.245083207261725,
      "grad_norm": 0.7586008906364441,
      "learning_rate": 2.4549386112679394e-06,
      "loss": 0.5218,
      "step": 717
    },
    {
      "epoch": 3.2496217851739786,
      "grad_norm": 0.7302015423774719,
      "learning_rate": 2.448502123757022e-06,
      "loss": 0.5271,
      "step": 718
    },
    {
      "epoch": 3.254160363086233,
      "grad_norm": 0.7475630044937134,
      "learning_rate": 2.4420659777291637e-06,
      "loss": 0.5189,
      "step": 719
    },
    {
      "epoch": 3.258698940998487,
      "grad_norm": 0.6879778504371643,
      "learning_rate": 2.435630215862529e-06,
      "loss": 0.5322,
      "step": 720
    },
    {
      "epoch": 3.263237518910741,
      "grad_norm": 0.7178695797920227,
      "learning_rate": 2.4291948808327346e-06,
      "loss": 0.511,
      "step": 721
    },
    {
      "epoch": 3.2677760968229954,
      "grad_norm": 0.9109921455383301,
      "learning_rate": 2.422760015312568e-06,
      "loss": 0.4752,
      "step": 722
    },
    {
      "epoch": 3.2723146747352496,
      "grad_norm": 0.6712206602096558,
      "learning_rate": 2.416325661971703e-06,
      "loss": 0.5074,
      "step": 723
    },
    {
      "epoch": 3.276853252647504,
      "grad_norm": 1.5459402799606323,
      "learning_rate": 2.4098918634764156e-06,
      "loss": 0.502,
      "step": 724
    },
    {
      "epoch": 3.281391830559758,
      "grad_norm": 0.7668531537055969,
      "learning_rate": 2.403458662489304e-06,
      "loss": 0.5012,
      "step": 725
    },
    {
      "epoch": 3.2859304084720122,
      "grad_norm": 0.7114011645317078,
      "learning_rate": 2.397026101669005e-06,
      "loss": 0.5506,
      "step": 726
    },
    {
      "epoch": 3.2904689863842664,
      "grad_norm": 0.749311625957489,
      "learning_rate": 2.3905942236699086e-06,
      "loss": 0.5321,
      "step": 727
    },
    {
      "epoch": 3.2950075642965206,
      "grad_norm": 0.7871769070625305,
      "learning_rate": 2.3841630711418784e-06,
      "loss": 0.4805,
      "step": 728
    },
    {
      "epoch": 3.2995461422087744,
      "grad_norm": 0.7565982937812805,
      "learning_rate": 2.377732686729967e-06,
      "loss": 0.5431,
      "step": 729
    },
    {
      "epoch": 3.3040847201210286,
      "grad_norm": 0.8726269006729126,
      "learning_rate": 2.371303113074134e-06,
      "loss": 0.5153,
      "step": 730
    },
    {
      "epoch": 3.308623298033283,
      "grad_norm": 0.7536921501159668,
      "learning_rate": 2.3648743928089612e-06,
      "loss": 0.4863,
      "step": 731
    },
    {
      "epoch": 3.313161875945537,
      "grad_norm": 0.796808660030365,
      "learning_rate": 2.358446568563374e-06,
      "loss": 0.4908,
      "step": 732
    },
    {
      "epoch": 3.317700453857791,
      "grad_norm": 0.7959126830101013,
      "learning_rate": 2.3520196829603547e-06,
      "loss": 0.5232,
      "step": 733
    },
    {
      "epoch": 3.3222390317700454,
      "grad_norm": 1.1234790086746216,
      "learning_rate": 2.3455937786166613e-06,
      "loss": 0.5304,
      "step": 734
    },
    {
      "epoch": 3.3267776096822996,
      "grad_norm": 0.687144935131073,
      "learning_rate": 2.3391688981425464e-06,
      "loss": 0.5027,
      "step": 735
    },
    {
      "epoch": 3.331316187594554,
      "grad_norm": 0.7692601680755615,
      "learning_rate": 2.3327450841414716e-06,
      "loss": 0.5021,
      "step": 736
    },
    {
      "epoch": 3.335854765506808,
      "grad_norm": 0.7073154449462891,
      "learning_rate": 2.3263223792098287e-06,
      "loss": 0.536,
      "step": 737
    },
    {
      "epoch": 3.340393343419062,
      "grad_norm": 0.9052091240882874,
      "learning_rate": 2.3199008259366524e-06,
      "loss": 0.5473,
      "step": 738
    },
    {
      "epoch": 3.344931921331316,
      "grad_norm": 0.8449652791023254,
      "learning_rate": 2.3134804669033437e-06,
      "loss": 0.5151,
      "step": 739
    },
    {
      "epoch": 3.34947049924357,
      "grad_norm": 0.7386903166770935,
      "learning_rate": 2.3070613446833843e-06,
      "loss": 0.5218,
      "step": 740
    },
    {
      "epoch": 3.3540090771558244,
      "grad_norm": 1.0674140453338623,
      "learning_rate": 2.300643501842052e-06,
      "loss": 0.4955,
      "step": 741
    },
    {
      "epoch": 3.3585476550680786,
      "grad_norm": 0.6917946338653564,
      "learning_rate": 2.294226980936143e-06,
      "loss": 0.5357,
      "step": 742
    },
    {
      "epoch": 3.363086232980333,
      "grad_norm": 0.735014021396637,
      "learning_rate": 2.287811824513688e-06,
      "loss": 0.5045,
      "step": 743
    },
    {
      "epoch": 3.367624810892587,
      "grad_norm": 0.9542885422706604,
      "learning_rate": 2.2813980751136686e-06,
      "loss": 0.52,
      "step": 744
    },
    {
      "epoch": 3.3721633888048412,
      "grad_norm": 0.8511250019073486,
      "learning_rate": 2.274985775265737e-06,
      "loss": 0.5107,
      "step": 745
    },
|
    {
      "epoch": 3.3767019667170954,
      "grad_norm": 0.7510440945625305,
      "learning_rate": 2.2685749674899346e-06,
      "loss": 0.4872,
      "step": 746
    },
    {
      "epoch": 3.3812405446293496,
      "grad_norm": 0.7399076819419861,
      "learning_rate": 2.262165694296406e-06,
      "loss": 0.5187,
      "step": 747
    },
    {
      "epoch": 3.385779122541604,
      "grad_norm": 0.6950759887695312,
      "learning_rate": 2.255757998185122e-06,
      "loss": 0.5205,
      "step": 748
    },
    {
      "epoch": 3.3903177004538576,
      "grad_norm": 0.736414909362793,
      "learning_rate": 2.2493519216455945e-06,
      "loss": 0.5092,
      "step": 749
    },
    {
      "epoch": 3.394856278366112,
      "grad_norm": 1.004551649093628,
      "learning_rate": 2.242947507156599e-06,
      "loss": 0.5326,
      "step": 750
    },
    {
      "epoch": 3.399394856278366,
      "grad_norm": 3.4429965019226074,
      "learning_rate": 2.2365447971858868e-06,
      "loss": 0.547,
      "step": 751
    },
    {
      "epoch": 3.40393343419062,
      "grad_norm": 0.672024130821228,
      "learning_rate": 2.2301438341899073e-06,
      "loss": 0.4979,
      "step": 752
    },
    {
      "epoch": 3.4084720121028744,
      "grad_norm": 0.8082073926925659,
      "learning_rate": 2.223744660613526e-06,
      "loss": 0.5099,
      "step": 753
    },
    {
      "epoch": 3.4130105900151286,
      "grad_norm": 0.7335776090621948,
      "learning_rate": 2.217347318889743e-06,
      "loss": 0.5196,
      "step": 754
    },
    {
      "epoch": 3.417549167927383,
      "grad_norm": 0.791305422782898,
      "learning_rate": 2.210951851439411e-06,
      "loss": 0.5072,
      "step": 755
    },
    {
      "epoch": 3.422087745839637,
      "grad_norm": 0.8634244203567505,
      "learning_rate": 2.204558300670954e-06,
      "loss": 0.5095,
      "step": 756
    },
    {
      "epoch": 3.4266263237518912,
      "grad_norm": 0.910862147808075,
      "learning_rate": 2.198166708980086e-06,
      "loss": 0.5334,
      "step": 757
    },
    {
      "epoch": 3.431164901664145,
      "grad_norm": 0.685192346572876,
      "learning_rate": 2.191777118749532e-06,
      "loss": 0.5061,
      "step": 758
    },
    {
      "epoch": 3.435703479576399,
      "grad_norm": 0.7736103534698486,
      "learning_rate": 2.185389572348745e-06,
      "loss": 0.5209,
      "step": 759
    },
    {
      "epoch": 3.4402420574886534,
      "grad_norm": 2.14925217628479,
      "learning_rate": 2.1790041121336223e-06,
      "loss": 0.5325,
      "step": 760
    },
    {
      "epoch": 3.4447806354009076,
      "grad_norm": 0.7848958969116211,
      "learning_rate": 2.1726207804462336e-06,
      "loss": 0.5217,
      "step": 761
    },
    {
      "epoch": 3.449319213313162,
      "grad_norm": 0.8946971893310547,
      "learning_rate": 2.1662396196145295e-06,
      "loss": 0.5028,
      "step": 762
    },
    {
      "epoch": 3.453857791225416,
      "grad_norm": 0.7946920394897461,
      "learning_rate": 2.1598606719520663e-06,
      "loss": 0.4991,
      "step": 763
    },
    {
      "epoch": 3.4583963691376702,
      "grad_norm": 0.707175076007843,
      "learning_rate": 2.153483979757727e-06,
      "loss": 0.5172,
      "step": 764
    },
    {
      "epoch": 3.4629349470499244,
      "grad_norm": 0.8119880557060242,
      "learning_rate": 2.147109585315435e-06,
      "loss": 0.4985,
      "step": 765
    },
    {
      "epoch": 3.4674735249621786,
      "grad_norm": 0.8942325711250305,
      "learning_rate": 2.1407375308938807e-06,
      "loss": 0.4909,
      "step": 766
    },
    {
      "epoch": 3.472012102874433,
      "grad_norm": 0.9520535469055176,
      "learning_rate": 2.134367858746236e-06,
      "loss": 0.5228,
      "step": 767
    },
    {
      "epoch": 3.476550680786687,
      "grad_norm": 0.9031659364700317,
      "learning_rate": 2.1280006111098754e-06,
      "loss": 0.5083,
      "step": 768
    },
    {
      "epoch": 3.481089258698941,
      "grad_norm": 0.9307310581207275,
      "learning_rate": 2.1216358302060987e-06,
      "loss": 0.5067,
      "step": 769
    },
    {
      "epoch": 3.485627836611195,
      "grad_norm": 0.7787923812866211,
      "learning_rate": 2.1152735582398453e-06,
      "loss": 0.4929,
      "step": 770
    },
    {
      "epoch": 3.4901664145234492,
      "grad_norm": 0.7476038932800293,
      "learning_rate": 2.1089138373994226e-06,
      "loss": 0.5082,
      "step": 771
    },
    {
      "epoch": 3.4947049924357034,
      "grad_norm": 1.7294312715530396,
      "learning_rate": 2.1025567098562177e-06,
      "loss": 0.511,
      "step": 772
    },
    {
      "epoch": 3.4992435703479576,
      "grad_norm": 1.9948171377182007,
      "learning_rate": 2.096202217764422e-06,
      "loss": 0.5264,
      "step": 773
    },
    {
      "epoch": 3.503782148260212,
      "grad_norm": 0.7554813623428345,
      "learning_rate": 2.089850403260753e-06,
      "loss": 0.4951,
      "step": 774
    },
    {
      "epoch": 3.508320726172466,
      "grad_norm": 0.8657441139221191,
      "learning_rate": 2.0835013084641704e-06,
      "loss": 0.4973,
      "step": 775
    },
    {
      "epoch": 3.5128593040847202,
      "grad_norm": 1.0366365909576416,
      "learning_rate": 2.0771549754756014e-06,
      "loss": 0.5198,
      "step": 776
    },
    {
      "epoch": 3.517397881996974,
      "grad_norm": 0.9612494707107544,
      "learning_rate": 2.070811446377659e-06,
      "loss": 0.4916,
      "step": 777
    },
    {
      "epoch": 3.521936459909228,
      "grad_norm": 0.7001491785049438,
      "learning_rate": 2.064470763234364e-06,
      "loss": 0.5143,
      "step": 778
    },
    {
      "epoch": 3.5264750378214824,
      "grad_norm": 0.7265450954437256,
      "learning_rate": 2.0581329680908654e-06,
      "loss": 0.4852,
      "step": 779
    },
    {
      "epoch": 3.5310136157337366,
      "grad_norm": 0.7637912631034851,
      "learning_rate": 2.0517981029731613e-06,
      "loss": 0.5101,
      "step": 780
    },
|
    {
      "epoch": 3.535552193645991,
      "grad_norm": 0.7350311279296875,
      "learning_rate": 2.045466209887821e-06,
      "loss": 0.4884,
      "step": 781
    },
    {
      "epoch": 3.540090771558245,
      "grad_norm": 2.7524001598358154,
      "learning_rate": 2.0391373308217077e-06,
      "loss": 0.5007,
      "step": 782
    },
    {
      "epoch": 3.5446293494704992,
      "grad_norm": 0.773992657661438,
      "learning_rate": 2.032811507741697e-06,
      "loss": 0.5144,
      "step": 783
    },
    {
      "epoch": 3.5491679273827534,
      "grad_norm": 0.7631218433380127,
      "learning_rate": 2.0264887825944e-06,
      "loss": 0.5249,
      "step": 784
    },
    {
      "epoch": 3.5537065052950076,
      "grad_norm": 0.8911067843437195,
      "learning_rate": 2.020169197305886e-06,
      "loss": 0.5218,
      "step": 785
    },
    {
      "epoch": 3.558245083207262,
      "grad_norm": 0.6976955533027649,
      "learning_rate": 2.013852793781404e-06,
      "loss": 0.4844,
      "step": 786
    },
    {
      "epoch": 3.562783661119516,
      "grad_norm": 0.768477201461792,
      "learning_rate": 2.007539613905104e-06,
      "loss": 0.5026,
      "step": 787
    },
    {
      "epoch": 3.5673222390317703,
      "grad_norm": 0.6874233484268188,
      "learning_rate": 2.0012296995397614e-06,
      "loss": 0.5416,
      "step": 788
    },
    {
      "epoch": 3.5718608169440245,
      "grad_norm": 0.9146207571029663,
      "learning_rate": 1.9949230925264963e-06,
      "loss": 0.5285,
      "step": 789
    },
    {
      "epoch": 3.5763993948562782,
      "grad_norm": 1.090366244316101,
      "learning_rate": 1.988619834684499e-06,
      "loss": 0.5133,
      "step": 790
    },
    {
      "epoch": 3.5809379727685324,
      "grad_norm": 0.9968334436416626,
      "learning_rate": 1.982319967810749e-06,
      "loss": 0.4654,
      "step": 791
    },
    {
      "epoch": 3.5854765506807866,
      "grad_norm": 0.7116790413856506,
      "learning_rate": 1.976023533679744e-06,
      "loss": 0.5077,
      "step": 792
    },
    {
      "epoch": 3.590015128593041,
      "grad_norm": 1.0453625917434692,
      "learning_rate": 1.969730574043217e-06,
      "loss": 0.5124,
      "step": 793
    },
    {
      "epoch": 3.594553706505295,
      "grad_norm": 0.7500141859054565,
      "learning_rate": 1.9634411306298614e-06,
      "loss": 0.4981,
      "step": 794
    },
    {
      "epoch": 3.5990922844175492,
      "grad_norm": 0.9187857508659363,
      "learning_rate": 1.9571552451450542e-06,
      "loss": 0.5318,
      "step": 795
    },
    {
      "epoch": 3.6036308623298035,
      "grad_norm": 0.7069258689880371,
      "learning_rate": 1.950872959270581e-06,
      "loss": 0.4844,
      "step": 796
    },
    {
      "epoch": 3.608169440242057,
      "grad_norm": 0.8754012584686279,
      "learning_rate": 1.944594314664358e-06,
      "loss": 0.4946,
      "step": 797
    },
    {
      "epoch": 3.6127080181543114,
      "grad_norm": 0.8324469327926636,
      "learning_rate": 1.938319352960156e-06,
      "loss": 0.5172,
      "step": 798
    },
    {
      "epoch": 3.6172465960665656,
      "grad_norm": 0.7333508729934692,
      "learning_rate": 1.9320481157673225e-06,
      "loss": 0.5157,
      "step": 799
    },
    {
      "epoch": 3.62178517397882,
      "grad_norm": 0.7321659326553345,
      "learning_rate": 1.9257806446705116e-06,
      "loss": 0.5297,
      "step": 800
    },
    {
      "epoch": 3.626323751891074,
      "grad_norm": 0.7912946343421936,
      "learning_rate": 1.919516981229401e-06,
      "loss": 0.5017,
      "step": 801
    },
    {
      "epoch": 3.6308623298033282,
      "grad_norm": 0.7116424441337585,
      "learning_rate": 1.9132571669784215e-06,
      "loss": 0.502,
      "step": 802
    },
    {
      "epoch": 3.6354009077155824,
      "grad_norm": 0.6774429678916931,
      "learning_rate": 1.9070012434264793e-06,
      "loss": 0.547,
      "step": 803
    },
    {
      "epoch": 3.6399394856278366,
      "grad_norm": 0.9747133851051331,
      "learning_rate": 1.9007492520566813e-06,
      "loss": 0.512,
      "step": 804
    },
    {
      "epoch": 3.644478063540091,
      "grad_norm": 0.6881934404373169,
      "learning_rate": 1.8945012343260605e-06,
      "loss": 0.5357,
      "step": 805
    },
    {
      "epoch": 3.649016641452345,
      "grad_norm": 0.7221348881721497,
      "learning_rate": 1.8882572316653003e-06,
      "loss": 0.5118,
      "step": 806
    },
    {
      "epoch": 3.6535552193645993,
      "grad_norm": 0.7595603466033936,
      "learning_rate": 1.88201728547846e-06,
      "loss": 0.5288,
      "step": 807
    },
    {
      "epoch": 3.6580937972768535,
      "grad_norm": 0.9129202961921692,
      "learning_rate": 1.8757814371427003e-06,
      "loss": 0.4976,
      "step": 808
    },
    {
      "epoch": 3.6626323751891077,
      "grad_norm": 1.1038625240325928,
      "learning_rate": 1.8695497280080094e-06,
      "loss": 0.5428,
      "step": 809
    },
    {
      "epoch": 3.6671709531013614,
      "grad_norm": 0.9567497372627258,
      "learning_rate": 1.8633221993969285e-06,
      "loss": 0.498,
      "step": 810
    },
    {
      "epoch": 3.6717095310136156,
      "grad_norm": 0.7735843062400818,
      "learning_rate": 1.857098892604277e-06,
      "loss": 0.5222,
      "step": 811
    },
    {
      "epoch": 3.67624810892587,
      "grad_norm": 0.9261084794998169,
      "learning_rate": 1.8508798488968805e-06,
      "loss": 0.4969,
      "step": 812
    },
    {
      "epoch": 3.680786686838124,
      "grad_norm": 0.7963117361068726,
      "learning_rate": 1.844665109513294e-06,
      "loss": 0.5127,
      "step": 813
    },
    {
      "epoch": 3.6853252647503782,
      "grad_norm": 0.9042128920555115,
      "learning_rate": 1.8384547156635324e-06,
      "loss": 0.5214,
      "step": 814
    },
    {
      "epoch": 3.6898638426626325,
      "grad_norm": 0.9819813370704651,
      "learning_rate": 1.8322487085287953e-06,
      "loss": 0.5208,
      "step": 815
    },
    {
      "epoch": 3.6944024205748867,
      "grad_norm": 0.7022226452827454,
      "learning_rate": 1.8260471292611936e-06,
      "loss": 0.5422,
      "step": 816
    },
|
    {
      "epoch": 3.6989409984871404,
      "grad_norm": 0.7549095749855042,
      "learning_rate": 1.8198500189834757e-06,
      "loss": 0.5197,
      "step": 817
    },
    {
      "epoch": 3.7034795763993946,
      "grad_norm": 1.0044478178024292,
      "learning_rate": 1.813657418788757e-06,
      "loss": 0.5134,
      "step": 818
    },
    {
      "epoch": 3.708018154311649,
      "grad_norm": 0.7384189963340759,
      "learning_rate": 1.8074693697402473e-06,
      "loss": 0.5367,
      "step": 819
    },
    {
      "epoch": 3.712556732223903,
      "grad_norm": 0.7035711407661438,
      "learning_rate": 1.8012859128709766e-06,
      "loss": 0.509,
      "step": 820
    },
    {
      "epoch": 3.7170953101361572,
      "grad_norm": 4.242894649505615,
      "learning_rate": 1.7951070891835245e-06,
      "loss": 0.4983,
      "step": 821
    },
    {
      "epoch": 3.7216338880484114,
      "grad_norm": 0.7296944856643677,
      "learning_rate": 1.7889329396497478e-06,
      "loss": 0.5133,
      "step": 822
    },
    {
      "epoch": 3.7261724659606656,
      "grad_norm": 0.6937296390533447,
      "learning_rate": 1.7827635052105095e-06,
      "loss": 0.5226,
      "step": 823
    },
    {
      "epoch": 3.73071104387292,
      "grad_norm": 0.6841968894004822,
      "learning_rate": 1.7765988267754053e-06,
      "loss": 0.5287,
      "step": 824
    },
    {
      "epoch": 3.735249621785174,
      "grad_norm": 0.7006022930145264,
      "learning_rate": 1.7704389452224945e-06,
      "loss": 0.5037,
      "step": 825
    },
    {
      "epoch": 3.7397881996974283,
      "grad_norm": 1.5364707708358765,
      "learning_rate": 1.7642839013980305e-06,
      "loss": 0.4777,
      "step": 826
    },
    {
      "epoch": 3.7443267776096825,
      "grad_norm": 0.7404047846794128,
      "learning_rate": 1.7581337361161838e-06,
      "loss": 0.5111,
      "step": 827
    },
    {
      "epoch": 3.7488653555219367,
      "grad_norm": 0.6842325925827026,
      "learning_rate": 1.7519884901587773e-06,
      "loss": 0.5349,
      "step": 828
    },
    {
      "epoch": 3.753403933434191,
      "grad_norm": 1.1200604438781738,
      "learning_rate": 1.7458482042750138e-06,
      "loss": 0.4894,
      "step": 829
    },
    {
      "epoch": 3.7579425113464446,
      "grad_norm": 0.6718960404396057,
      "learning_rate": 1.7397129191812058e-06,
      "loss": 0.4482,
      "step": 830
    },
    {
      "epoch": 3.762481089258699,
      "grad_norm": 0.7164394855499268,
      "learning_rate": 1.7335826755605043e-06,
      "loss": 0.5047,
      "step": 831
    },
    {
      "epoch": 3.767019667170953,
      "grad_norm": 0.8456952571868896,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 0.4874,
      "step": 832
    },
    {
      "epoch": 3.7715582450832073,
      "grad_norm": 0.8413048386573792,
      "learning_rate": 1.7213374753036105e-06,
      "loss": 0.52,
      "step": 833
    },
    {
      "epoch": 3.7760968229954615,
      "grad_norm": 0.7125583291053772,
      "learning_rate": 1.7152225998654934e-06,
      "loss": 0.4917,
      "step": 834
    },
    {
      "epoch": 3.7806354009077157,
      "grad_norm": 0.7068613767623901,
      "learning_rate": 1.7091129282960966e-06,
      "loss": 0.5111,
      "step": 835
    },
    {
      "epoch": 3.78517397881997,
      "grad_norm": 0.7550258636474609,
      "learning_rate": 1.703008501108726e-06,
      "loss": 0.5303,
      "step": 836
    },
    {
      "epoch": 3.789712556732224,
      "grad_norm": 0.7415627241134644,
      "learning_rate": 1.696909358781917e-06,
      "loss": 0.5094,
      "step": 837
    },
    {
      "epoch": 3.794251134644478,
      "grad_norm": 0.6911723017692566,
      "learning_rate": 1.6908155417591576e-06,
      "loss": 0.4979,
      "step": 838
    },
    {
      "epoch": 3.798789712556732,
      "grad_norm": 0.7296070456504822,
      "learning_rate": 1.684727090448624e-06,
      "loss": 0.5099,
      "step": 839
    },
    {
      "epoch": 3.8033282904689862,
      "grad_norm": 0.7332204580307007,
      "learning_rate": 1.6786440452229134e-06,
      "loss": 0.5198,
      "step": 840
    },
    {
      "epoch": 3.8078668683812404,
      "grad_norm": 0.8778614401817322,
      "learning_rate": 1.6725664464187734e-06,
      "loss": 0.4959,
      "step": 841
    },
    {
      "epoch": 3.8124054462934946,
      "grad_norm": 0.7139490246772766,
      "learning_rate": 1.6664943343368386e-06,
      "loss": 0.4968,
      "step": 842
    },
    {
      "epoch": 3.816944024205749,
      "grad_norm": 0.7621096968650818,
      "learning_rate": 1.660427749241359e-06,
      "loss": 0.4932,
      "step": 843
    },
    {
      "epoch": 3.821482602118003,
      "grad_norm": 0.6902374625205994,
      "learning_rate": 1.6543667313599366e-06,
      "loss": 0.5035,
      "step": 844
    },
    {
      "epoch": 3.8260211800302573,
      "grad_norm": 0.6864549517631531,
      "learning_rate": 1.6483113208832562e-06,
      "loss": 0.5058,
      "step": 845
    },
    {
      "epoch": 3.8305597579425115,
      "grad_norm": 0.7007944583892822,
      "learning_rate": 1.6422615579648202e-06,
      "loss": 0.5303,
      "step": 846
    },
    {
      "epoch": 3.8350983358547657,
      "grad_norm": 5.041836738586426,
      "learning_rate": 1.6362174827206806e-06,
      "loss": 0.5142,
      "step": 847
    },
    {
      "epoch": 3.83963691376702,
      "grad_norm": 0.7452664971351624,
      "learning_rate": 1.6301791352291774e-06,
      "loss": 0.4758,
      "step": 848
    },
    {
      "epoch": 3.844175491679274,
      "grad_norm": 0.777967095375061,
      "learning_rate": 1.6241465555306679e-06,
      "loss": 0.5047,
      "step": 849
    },
|
    {
      "epoch": 3.848714069591528,
      "grad_norm": 0.723528265953064,
      "learning_rate": 1.618119783627263e-06,
      "loss": 0.5263,
      "step": 850
    },
    {
      "epoch": 3.853252647503782,
      "grad_norm": 0.6719141006469727,
      "learning_rate": 1.612098859482562e-06,
      "loss": 0.5024,
      "step": 851
    },
    {
      "epoch": 3.8577912254160363,
      "grad_norm": 0.7341691255569458,
      "learning_rate": 1.6060838230213883e-06,
      "loss": 0.5148,
      "step": 852
    },
    {
      "epoch": 3.8623298033282905,
      "grad_norm": 0.7549681663513184,
      "learning_rate": 1.6000747141295233e-06,
      "loss": 0.5314,
      "step": 853
    },
    {
      "epoch": 3.8668683812405447,
      "grad_norm": 0.7185303568840027,
      "learning_rate": 1.594071572653444e-06,
      "loss": 0.5095,
      "step": 854
    },
    {
      "epoch": 3.871406959152799,
      "grad_norm": 0.7704632878303528,
      "learning_rate": 1.5880744384000544e-06,
      "loss": 0.4827,
      "step": 855
    },
    {
      "epoch": 3.875945537065053,
      "grad_norm": 0.7089985609054565,
      "learning_rate": 1.5820833511364275e-06,
      "loss": 0.5108,
      "step": 856
    },
    {
      "epoch": 3.8804841149773073,
      "grad_norm": 0.7662360072135925,
      "learning_rate": 1.5760983505895377e-06,
      "loss": 0.5123,
      "step": 857
    },
    {
      "epoch": 3.885022692889561,
      "grad_norm": 0.8514230847358704,
      "learning_rate": 1.570119476445997e-06,
      "loss": 0.5043,
      "step": 858
    },
    {
      "epoch": 3.8895612708018152,
      "grad_norm": 0.6921412348747253,
      "learning_rate": 1.5641467683517967e-06,
      "loss": 0.4982,
      "step": 859
    },
    {
      "epoch": 3.8940998487140694,
      "grad_norm": 0.6514143347740173,
      "learning_rate": 1.558180265912037e-06,
      "loss": 0.5285,
      "step": 860
    },
    {
      "epoch": 3.8986384266263236,
      "grad_norm": 1.3229821920394897,
      "learning_rate": 1.5522200086906708e-06,
      "loss": 0.5135,
      "step": 861
    },
    {
      "epoch": 3.903177004538578,
      "grad_norm": 0.6929940581321716,
      "learning_rate": 1.5462660362102371e-06,
      "loss": 0.5027,
      "step": 862
    },
    {
      "epoch": 3.907715582450832,
      "grad_norm": 0.8205671906471252,
      "learning_rate": 1.5403183879516025e-06,
      "loss": 0.5031,
      "step": 863
    },
    {
      "epoch": 3.9122541603630863,
      "grad_norm": 0.7823313474655151,
      "learning_rate": 1.534377103353696e-06,
      "loss": 0.5212,
      "step": 864
    },
    {
      "epoch": 3.9167927382753405,
      "grad_norm": 1.095670223236084,
      "learning_rate": 1.5284422218132495e-06,
      "loss": 0.4762,
      "step": 865
    },
    {
      "epoch": 3.9213313161875947,
      "grad_norm": 0.7315182685852051,
      "learning_rate": 1.5225137826845371e-06,
      "loss": 0.5012,
      "step": 866
    },
    {
      "epoch": 3.925869894099849,
      "grad_norm": 1.2629145383834839,
      "learning_rate": 1.5165918252791125e-06,
      "loss": 0.488,
      "step": 867
    },
    {
      "epoch": 3.930408472012103,
      "grad_norm": 0.8126862049102783,
      "learning_rate": 1.510676388865548e-06,
      "loss": 0.4999,
      "step": 868
    },
    {
      "epoch": 3.9349470499243573,
      "grad_norm": 0.7050077319145203,
      "learning_rate": 1.5047675126691783e-06,
      "loss": 0.4863,
      "step": 869
    },
    {
      "epoch": 3.939485627836611,
      "grad_norm": 0.8243216276168823,
      "learning_rate": 1.4988652358718336e-06,
      "loss": 0.5145,
      "step": 870
    },
    {
      "epoch": 3.9440242057488653,
      "grad_norm": 0.9307572245597839,
      "learning_rate": 1.4929695976115854e-06,
      "loss": 0.5001,
      "step": 871
    },
    {
      "epoch": 3.9485627836611195,
      "grad_norm": 0.6967872381210327,
      "learning_rate": 1.4870806369824847e-06,
      "loss": 0.5402,
      "step": 872
    },
    {
      "epoch": 3.9531013615733737,
      "grad_norm": 0.7018783092498779,
      "learning_rate": 1.4811983930343018e-06,
      "loss": 0.5262,
      "step": 873
    },
    {
      "epoch": 3.957639939485628,
      "grad_norm": 0.6808320879936218,
      "learning_rate": 1.4753229047722704e-06,
      "loss": 0.5046,
      "step": 874
    },
    {
      "epoch": 3.962178517397882,
      "grad_norm": 1.0063635110855103,
      "learning_rate": 1.4694542111568261e-06,
      "loss": 0.5292,
      "step": 875
    },
    {
      "epoch": 3.9667170953101363,
      "grad_norm": 0.7578749060630798,
      "learning_rate": 1.4635923511033496e-06,
      "loss": 0.4964,
      "step": 876
    },
    {
      "epoch": 3.9712556732223905,
      "grad_norm": 0.7392986416816711,
      "learning_rate": 1.4577373634819075e-06,
      "loss": 0.5081,
      "step": 877
    },
    {
      "epoch": 3.9757942511346442,
      "grad_norm": 0.737310528755188,
      "learning_rate": 1.451889287116996e-06,
      "loss": 0.5507,
      "step": 878
    },
    {
      "epoch": 3.9803328290468984,
      "grad_norm": 1.102256417274475,
      "learning_rate": 1.446048160787282e-06,
      "loss": 0.5041,
      "step": 879
    },
    {
      "epoch": 3.9848714069591527,
      "grad_norm": 0.790909469127655,
      "learning_rate": 1.4402140232253486e-06,
      "loss": 0.5064,
      "step": 880
    }
  ],
  "logging_steps": 1,
  "max_steps": 1320,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 220,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.6647993727894684e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}