{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 0,
  "global_step": 246,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0040650406504065045,
      "grad_norm": 1.3748667240142822,
      "learning_rate": 1e-05,
      "loss": 2.5758,
      "step": 1
    },
    {
      "epoch": 0.008130081300813009,
      "grad_norm": 1.2784767150878906,
      "learning_rate": 9.959349593495936e-06,
      "loss": 2.5216,
      "step": 2
    },
    {
      "epoch": 0.012195121951219513,
      "grad_norm": 1.2706096172332764,
      "learning_rate": 9.91869918699187e-06,
      "loss": 2.5553,
      "step": 3
    },
    {
      "epoch": 0.016260162601626018,
      "grad_norm": 1.2058460712432861,
      "learning_rate": 9.878048780487805e-06,
      "loss": 2.5158,
      "step": 4
    },
    {
      "epoch": 0.02032520325203252,
      "grad_norm": 1.1286356449127197,
      "learning_rate": 9.837398373983741e-06,
      "loss": 2.4486,
      "step": 5
    },
    {
      "epoch": 0.024390243902439025,
      "grad_norm": 1.125715732574463,
      "learning_rate": 9.796747967479675e-06,
      "loss": 2.4741,
      "step": 6
    },
    {
      "epoch": 0.028455284552845527,
      "grad_norm": 1.089099407196045,
      "learning_rate": 9.756097560975611e-06,
      "loss": 2.4565,
      "step": 7
    },
    {
      "epoch": 0.032520325203252036,
      "grad_norm": 1.068673014640808,
      "learning_rate": 9.715447154471546e-06,
      "loss": 2.4487,
      "step": 8
    },
    {
      "epoch": 0.036585365853658534,
      "grad_norm": 0.9413436055183411,
      "learning_rate": 9.67479674796748e-06,
      "loss": 2.3395,
      "step": 9
    },
    {
      "epoch": 0.04065040650406504,
      "grad_norm": 0.8963584303855896,
      "learning_rate": 9.634146341463415e-06,
      "loss": 2.3072,
      "step": 10
    },
    {
      "epoch": 0.044715447154471545,
      "grad_norm": 0.9504267573356628,
      "learning_rate": 9.59349593495935e-06,
      "loss": 2.4042,
      "step": 11
    },
    {
      "epoch": 0.04878048780487805,
      "grad_norm": 0.8742660284042358,
      "learning_rate": 9.552845528455286e-06,
      "loss": 2.3433,
      "step": 12
    },
    {
      "epoch": 0.052845528455284556,
      "grad_norm": 0.8436909914016724,
      "learning_rate": 9.51219512195122e-06,
      "loss": 2.2921,
      "step": 13
    },
    {
      "epoch": 0.056910569105691054,
      "grad_norm": 0.8690633177757263,
      "learning_rate": 9.471544715447156e-06,
      "loss": 2.3208,
      "step": 14
    },
    {
      "epoch": 0.06097560975609756,
      "grad_norm": 0.8250911235809326,
      "learning_rate": 9.43089430894309e-06,
      "loss": 2.2882,
      "step": 15
    },
    {
      "epoch": 0.06504065040650407,
      "grad_norm": 0.8128834962844849,
      "learning_rate": 9.390243902439025e-06,
      "loss": 2.2526,
      "step": 16
    },
    {
      "epoch": 0.06910569105691057,
      "grad_norm": 0.8213804960250854,
      "learning_rate": 9.34959349593496e-06,
      "loss": 2.2962,
      "step": 17
    },
    {
      "epoch": 0.07317073170731707,
      "grad_norm": 0.7577650547027588,
      "learning_rate": 9.308943089430895e-06,
      "loss": 2.2449,
      "step": 18
    },
    {
      "epoch": 0.07723577235772358,
      "grad_norm": 0.790264368057251,
      "learning_rate": 9.268292682926831e-06,
      "loss": 2.2296,
      "step": 19
    },
    {
      "epoch": 0.08130081300813008,
      "grad_norm": 0.7232661247253418,
      "learning_rate": 9.227642276422764e-06,
      "loss": 2.1508,
      "step": 20
    },
    {
      "epoch": 0.08536585365853659,
      "grad_norm": 0.7576490640640259,
      "learning_rate": 9.1869918699187e-06,
      "loss": 2.2099,
      "step": 21
    },
    {
      "epoch": 0.08943089430894309,
      "grad_norm": 0.7341296672821045,
      "learning_rate": 9.146341463414635e-06,
      "loss": 2.1892,
      "step": 22
    },
    {
      "epoch": 0.09349593495934959,
      "grad_norm": 0.7156996130943298,
      "learning_rate": 9.10569105691057e-06,
      "loss": 2.1675,
      "step": 23
    },
    {
      "epoch": 0.0975609756097561,
      "grad_norm": 0.6440656185150146,
      "learning_rate": 9.065040650406505e-06,
      "loss": 2.0993,
      "step": 24
    },
    {
      "epoch": 0.1016260162601626,
      "grad_norm": 0.6809216141700745,
      "learning_rate": 9.02439024390244e-06,
      "loss": 2.1675,
      "step": 25
    },
    {
      "epoch": 0.10569105691056911,
      "grad_norm": 0.6825043559074402,
      "learning_rate": 8.983739837398374e-06,
      "loss": 2.1462,
      "step": 26
    },
    {
      "epoch": 0.10975609756097561,
      "grad_norm": 0.6550068855285645,
      "learning_rate": 8.94308943089431e-06,
      "loss": 2.1678,
      "step": 27
    },
    {
      "epoch": 0.11382113821138211,
      "grad_norm": 0.6182063221931458,
      "learning_rate": 8.902439024390244e-06,
      "loss": 2.136,
      "step": 28
    },
    {
      "epoch": 0.11788617886178862,
      "grad_norm": 0.6627913117408752,
      "learning_rate": 8.86178861788618e-06,
      "loss": 2.0555,
      "step": 29
    },
    {
      "epoch": 0.12195121951219512,
      "grad_norm": 0.6257705688476562,
      "learning_rate": 8.821138211382113e-06,
      "loss": 2.1095,
      "step": 30
    },
    {
      "epoch": 0.12601626016260162,
      "grad_norm": 0.5838558673858643,
      "learning_rate": 8.78048780487805e-06,
      "loss": 2.0794,
      "step": 31
    },
    {
      "epoch": 0.13008130081300814,
      "grad_norm": 0.6288259029388428,
      "learning_rate": 8.739837398373985e-06,
      "loss": 2.1067,
      "step": 32
    },
    {
      "epoch": 0.13414634146341464,
      "grad_norm": 0.5768400430679321,
      "learning_rate": 8.69918699186992e-06,
      "loss": 2.0634,
      "step": 33
    },
    {
      "epoch": 0.13821138211382114,
      "grad_norm": 0.6065109372138977,
      "learning_rate": 8.658536585365854e-06,
      "loss": 2.0407,
      "step": 34
    },
    {
      "epoch": 0.14227642276422764,
      "grad_norm": 0.588671088218689,
      "learning_rate": 8.617886178861789e-06,
      "loss": 2.0634,
      "step": 35
    },
    {
      "epoch": 0.14634146341463414,
      "grad_norm": 0.5733758807182312,
      "learning_rate": 8.577235772357724e-06,
      "loss": 2.044,
      "step": 36
    },
    {
      "epoch": 0.15040650406504066,
      "grad_norm": 0.5910205841064453,
      "learning_rate": 8.536585365853658e-06,
      "loss": 2.0375,
      "step": 37
    },
    {
      "epoch": 0.15447154471544716,
      "grad_norm": 0.5399029850959778,
      "learning_rate": 8.495934959349595e-06,
      "loss": 2.0321,
      "step": 38
    },
    {
      "epoch": 0.15853658536585366,
      "grad_norm": 0.6022946238517761,
      "learning_rate": 8.45528455284553e-06,
      "loss": 2.0283,
      "step": 39
    },
    {
      "epoch": 0.16260162601626016,
      "grad_norm": 0.5221655368804932,
      "learning_rate": 8.414634146341464e-06,
      "loss": 1.9423,
      "step": 40
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 0.5715436339378357,
      "learning_rate": 8.373983739837399e-06,
      "loss": 1.896,
      "step": 41
    },
    {
      "epoch": 0.17073170731707318,
      "grad_norm": 0.5900686979293823,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.9766,
      "step": 42
    },
    {
      "epoch": 0.17479674796747968,
      "grad_norm": 0.4903549551963806,
      "learning_rate": 8.292682926829268e-06,
      "loss": 1.9507,
      "step": 43
    },
    {
      "epoch": 0.17886178861788618,
      "grad_norm": 0.511839747428894,
      "learning_rate": 8.252032520325203e-06,
      "loss": 1.8829,
      "step": 44
    },
    {
      "epoch": 0.18292682926829268,
      "grad_norm": 0.6022465229034424,
      "learning_rate": 8.21138211382114e-06,
      "loss": 2.0382,
      "step": 45
    },
    {
      "epoch": 0.18699186991869918,
      "grad_norm": 0.5093381404876709,
      "learning_rate": 8.170731707317073e-06,
      "loss": 1.9458,
      "step": 46
    },
    {
      "epoch": 0.1910569105691057,
      "grad_norm": 0.5268444418907166,
      "learning_rate": 8.130081300813009e-06,
      "loss": 1.9686,
      "step": 47
    },
    {
      "epoch": 0.1951219512195122,
      "grad_norm": 0.47973573207855225,
      "learning_rate": 8.089430894308944e-06,
      "loss": 1.9357,
      "step": 48
    },
    {
      "epoch": 0.1991869918699187,
      "grad_norm": 0.4572773575782776,
      "learning_rate": 8.048780487804879e-06,
      "loss": 1.9086,
      "step": 49
    },
    {
      "epoch": 0.2032520325203252,
      "grad_norm": 0.4701896011829376,
      "learning_rate": 8.008130081300813e-06,
      "loss": 1.9454,
      "step": 50
    },
    {
      "epoch": 0.2073170731707317,
      "grad_norm": 0.49787646532058716,
      "learning_rate": 7.967479674796748e-06,
      "loss": 1.9274,
      "step": 51
    },
    {
      "epoch": 0.21138211382113822,
      "grad_norm": 0.450969398021698,
      "learning_rate": 7.926829268292685e-06,
      "loss": 1.9194,
      "step": 52
    },
    {
      "epoch": 0.21544715447154472,
      "grad_norm": 0.4827839732170105,
      "learning_rate": 7.886178861788618e-06,
      "loss": 1.8947,
      "step": 53
    },
    {
      "epoch": 0.21951219512195122,
      "grad_norm": 0.47519227862358093,
      "learning_rate": 7.845528455284554e-06,
      "loss": 1.919,
      "step": 54
    },
    {
      "epoch": 0.22357723577235772,
      "grad_norm": 0.4576883614063263,
      "learning_rate": 7.804878048780489e-06,
      "loss": 1.9011,
      "step": 55
    },
    {
      "epoch": 0.22764227642276422,
      "grad_norm": 0.4596790671348572,
      "learning_rate": 7.764227642276424e-06,
      "loss": 1.9009,
      "step": 56
    },
    {
      "epoch": 0.23170731707317074,
      "grad_norm": 0.46299463510513306,
      "learning_rate": 7.723577235772358e-06,
      "loss": 1.9214,
      "step": 57
    },
    {
      "epoch": 0.23577235772357724,
      "grad_norm": 0.42920413613319397,
      "learning_rate": 7.682926829268293e-06,
      "loss": 1.8673,
      "step": 58
    },
    {
      "epoch": 0.23983739837398374,
      "grad_norm": 0.454936146736145,
      "learning_rate": 7.64227642276423e-06,
      "loss": 1.8651,
      "step": 59
    },
    {
      "epoch": 0.24390243902439024,
      "grad_norm": 0.4699499309062958,
      "learning_rate": 7.601626016260163e-06,
      "loss": 1.8484,
      "step": 60
    },
    {
      "epoch": 0.24796747967479674,
      "grad_norm": 0.4237484335899353,
      "learning_rate": 7.560975609756098e-06,
      "loss": 1.8947,
      "step": 61
    },
    {
      "epoch": 0.25203252032520324,
      "grad_norm": 0.4287896156311035,
      "learning_rate": 7.520325203252034e-06,
      "loss": 1.8491,
      "step": 62
    },
    {
      "epoch": 0.25609756097560976,
      "grad_norm": 0.45891255140304565,
      "learning_rate": 7.4796747967479676e-06,
      "loss": 1.8436,
      "step": 63
    },
    {
      "epoch": 0.2601626016260163,
      "grad_norm": 0.4400513470172882,
      "learning_rate": 7.439024390243903e-06,
      "loss": 1.8479,
      "step": 64
    },
    {
      "epoch": 0.26422764227642276,
      "grad_norm": 0.42221692204475403,
      "learning_rate": 7.398373983739838e-06,
      "loss": 1.853,
      "step": 65
    },
    {
      "epoch": 0.2682926829268293,
      "grad_norm": 0.4163649380207062,
      "learning_rate": 7.357723577235773e-06,
      "loss": 1.8083,
      "step": 66
    },
    {
      "epoch": 0.27235772357723576,
      "grad_norm": 0.4221528172492981,
      "learning_rate": 7.317073170731707e-06,
      "loss": 1.8727,
      "step": 67
    },
    {
      "epoch": 0.2764227642276423,
      "grad_norm": 0.4102783799171448,
      "learning_rate": 7.276422764227643e-06,
      "loss": 1.8416,
      "step": 68
    },
    {
      "epoch": 0.2804878048780488,
      "grad_norm": 0.42232567071914673,
      "learning_rate": 7.2357723577235786e-06,
      "loss": 1.8362,
      "step": 69
    },
    {
      "epoch": 0.2845528455284553,
      "grad_norm": 0.4951733946800232,
      "learning_rate": 7.1951219512195125e-06,
      "loss": 1.8302,
      "step": 70
    },
    {
      "epoch": 0.2886178861788618,
      "grad_norm": 0.44171613454818726,
      "learning_rate": 7.154471544715448e-06,
      "loss": 1.8404,
      "step": 71
    },
    {
      "epoch": 0.2926829268292683,
      "grad_norm": 0.4147375524044037,
      "learning_rate": 7.113821138211383e-06,
      "loss": 1.7993,
      "step": 72
    },
    {
      "epoch": 0.2967479674796748,
      "grad_norm": 0.41523224115371704,
      "learning_rate": 7.0731707317073175e-06,
      "loss": 1.7972,
      "step": 73
    },
    {
      "epoch": 0.3008130081300813,
      "grad_norm": 0.4788653552532196,
      "learning_rate": 7.032520325203252e-06,
      "loss": 1.7999,
      "step": 74
    },
    {
      "epoch": 0.3048780487804878,
      "grad_norm": 0.41081106662750244,
      "learning_rate": 6.991869918699188e-06,
      "loss": 1.8774,
      "step": 75
    },
    {
      "epoch": 0.3089430894308943,
      "grad_norm": 0.45427748560905457,
      "learning_rate": 6.951219512195122e-06,
      "loss": 1.7976,
      "step": 76
    },
    {
      "epoch": 0.3130081300813008,
      "grad_norm": 0.4211747944355011,
      "learning_rate": 6.910569105691057e-06,
      "loss": 1.8265,
      "step": 77
    },
    {
      "epoch": 0.3170731707317073,
      "grad_norm": 0.482440710067749,
      "learning_rate": 6.869918699186993e-06,
      "loss": 1.8224,
      "step": 78
    },
    {
      "epoch": 0.32113821138211385,
      "grad_norm": 0.4471375346183777,
      "learning_rate": 6.829268292682928e-06,
      "loss": 1.8346,
      "step": 79
    },
    {
      "epoch": 0.3252032520325203,
      "grad_norm": 0.43011748790740967,
      "learning_rate": 6.788617886178862e-06,
      "loss": 1.8136,
      "step": 80
    },
    {
      "epoch": 0.32926829268292684,
      "grad_norm": 0.41438254714012146,
      "learning_rate": 6.747967479674797e-06,
      "loss": 1.7561,
      "step": 81
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.43242278695106506,
      "learning_rate": 6.707317073170733e-06,
      "loss": 1.8202,
      "step": 82
    },
    {
      "epoch": 0.33739837398373984,
      "grad_norm": 0.4559822380542755,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.7962,
      "step": 83
    },
    {
      "epoch": 0.34146341463414637,
      "grad_norm": 0.41565415263175964,
      "learning_rate": 6.626016260162602e-06,
      "loss": 1.7748,
      "step": 84
    },
    {
      "epoch": 0.34552845528455284,
      "grad_norm": 0.3826720416545868,
      "learning_rate": 6.585365853658538e-06,
      "loss": 1.7771,
      "step": 85
    },
    {
      "epoch": 0.34959349593495936,
      "grad_norm": 0.4084075093269348,
      "learning_rate": 6.544715447154472e-06,
      "loss": 1.7623,
      "step": 86
    },
    {
      "epoch": 0.35365853658536583,
      "grad_norm": 0.4102393686771393,
      "learning_rate": 6.504065040650407e-06,
      "loss": 1.777,
      "step": 87
    },
    {
      "epoch": 0.35772357723577236,
      "grad_norm": 0.47018152475357056,
      "learning_rate": 6.463414634146342e-06,
      "loss": 1.7526,
      "step": 88
    },
    {
      "epoch": 0.3617886178861789,
      "grad_norm": 0.4740968942642212,
      "learning_rate": 6.422764227642278e-06,
      "loss": 1.8063,
      "step": 89
    },
    {
      "epoch": 0.36585365853658536,
      "grad_norm": 0.40948763489723206,
      "learning_rate": 6.3821138211382115e-06,
      "loss": 1.7836,
      "step": 90
    },
    {
      "epoch": 0.3699186991869919,
      "grad_norm": 0.4181380271911621,
      "learning_rate": 6.341463414634147e-06,
      "loss": 1.8029,
      "step": 91
    },
    {
      "epoch": 0.37398373983739835,
      "grad_norm": 0.478249728679657,
      "learning_rate": 6.300813008130082e-06,
      "loss": 1.7783,
      "step": 92
    },
    {
      "epoch": 0.3780487804878049,
      "grad_norm": 0.3988848328590393,
      "learning_rate": 6.260162601626017e-06,
      "loss": 1.7572,
      "step": 93
    },
    {
      "epoch": 0.3821138211382114,
      "grad_norm": 0.4052281379699707,
      "learning_rate": 6.219512195121951e-06,
      "loss": 1.8084,
      "step": 94
    },
    {
      "epoch": 0.3861788617886179,
      "grad_norm": 0.5780310034751892,
      "learning_rate": 6.178861788617887e-06,
      "loss": 1.7885,
      "step": 95
    },
    {
      "epoch": 0.3902439024390244,
      "grad_norm": 0.3908810019493103,
      "learning_rate": 6.138211382113821e-06,
      "loss": 1.7261,
      "step": 96
    },
    {
      "epoch": 0.3943089430894309,
      "grad_norm": 0.44484543800354004,
      "learning_rate": 6.0975609756097564e-06,
      "loss": 1.7299,
      "step": 97
    },
    {
      "epoch": 0.3983739837398374,
      "grad_norm": 0.48491889238357544,
      "learning_rate": 6.056910569105692e-06,
      "loss": 1.7355,
      "step": 98
    },
    {
      "epoch": 0.4024390243902439,
      "grad_norm": 0.4945378005504608,
      "learning_rate": 6.016260162601627e-06,
      "loss": 1.7955,
      "step": 99
    },
    {
      "epoch": 0.4065040650406504,
      "grad_norm": 0.44714319705963135,
      "learning_rate": 5.9756097560975615e-06,
      "loss": 1.7603,
      "step": 100
    },
    {
      "epoch": 0.4105691056910569,
      "grad_norm": 0.4738294184207916,
      "learning_rate": 5.934959349593496e-06,
      "loss": 1.7065,
      "step": 101
    },
    {
      "epoch": 0.4146341463414634,
      "grad_norm": 0.429187536239624,
      "learning_rate": 5.894308943089432e-06,
      "loss": 1.7831,
      "step": 102
    },
    {
      "epoch": 0.4186991869918699,
      "grad_norm": 0.47387829422950745,
      "learning_rate": 5.853658536585366e-06,
      "loss": 1.7591,
      "step": 103
    },
    {
      "epoch": 0.42276422764227645,
      "grad_norm": 0.4267633557319641,
      "learning_rate": 5.813008130081301e-06,
      "loss": 1.7755,
      "step": 104
    },
    {
      "epoch": 0.4268292682926829,
      "grad_norm": 0.3990177810192108,
      "learning_rate": 5.772357723577237e-06,
      "loss": 1.7391,
      "step": 105
    },
    {
      "epoch": 0.43089430894308944,
      "grad_norm": 0.4822699725627899,
      "learning_rate": 5.731707317073171e-06,
      "loss": 1.7601,
      "step": 106
    },
    {
      "epoch": 0.4349593495934959,
      "grad_norm": 0.464248925447464,
      "learning_rate": 5.691056910569106e-06,
      "loss": 1.7951,
      "step": 107
    },
    {
      "epoch": 0.43902439024390244,
      "grad_norm": 0.432405948638916,
      "learning_rate": 5.650406504065041e-06,
      "loss": 1.7501,
      "step": 108
    },
    {
      "epoch": 0.44308943089430897,
      "grad_norm": 0.4115266501903534,
      "learning_rate": 5.609756097560977e-06,
      "loss": 1.8129,
      "step": 109
    },
    {
      "epoch": 0.44715447154471544,
      "grad_norm": 0.39715027809143066,
      "learning_rate": 5.569105691056911e-06,
      "loss": 1.736,
      "step": 110
    },
    {
      "epoch": 0.45121951219512196,
      "grad_norm": 0.4466915726661682,
      "learning_rate": 5.528455284552846e-06,
      "loss": 1.7885,
      "step": 111
    },
    {
      "epoch": 0.45528455284552843,
      "grad_norm": 0.42750024795532227,
      "learning_rate": 5.487804878048781e-06,
      "loss": 1.7642,
      "step": 112
    },
    {
      "epoch": 0.45934959349593496,
      "grad_norm": 0.41643035411834717,
      "learning_rate": 5.447154471544716e-06,
      "loss": 1.7121,
      "step": 113
    },
    {
      "epoch": 0.4634146341463415,
      "grad_norm": 0.4076242744922638,
      "learning_rate": 5.4065040650406504e-06,
      "loss": 1.7574,
      "step": 114
    },
    {
      "epoch": 0.46747967479674796,
      "grad_norm": 0.45081648230552673,
      "learning_rate": 5.365853658536586e-06,
      "loss": 1.7505,
      "step": 115
    },
    {
      "epoch": 0.4715447154471545,
      "grad_norm": 0.4501321017742157,
      "learning_rate": 5.32520325203252e-06,
      "loss": 1.7157,
      "step": 116
    },
    {
      "epoch": 0.47560975609756095,
      "grad_norm": 0.4324929714202881,
      "learning_rate": 5.2845528455284555e-06,
      "loss": 1.763,
      "step": 117
    },
    {
      "epoch": 0.4796747967479675,
      "grad_norm": 0.49811163544654846,
      "learning_rate": 5.243902439024391e-06,
      "loss": 1.7487,
      "step": 118
    },
    {
      "epoch": 0.483739837398374,
      "grad_norm": 0.43030911684036255,
      "learning_rate": 5.203252032520326e-06,
      "loss": 1.7058,
      "step": 119
    },
    {
      "epoch": 0.4878048780487805,
      "grad_norm": 0.41700655221939087,
      "learning_rate": 5.162601626016261e-06,
      "loss": 1.7244,
      "step": 120
    },
    {
      "epoch": 0.491869918699187,
      "grad_norm": 0.4175284504890442,
      "learning_rate": 5.121951219512195e-06,
      "loss": 1.7248,
      "step": 121
    },
    {
      "epoch": 0.4959349593495935,
      "grad_norm": 0.4269621670246124,
      "learning_rate": 5.081300813008131e-06,
      "loss": 1.6854,
      "step": 122
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.4624629616737366,
      "learning_rate": 5.040650406504065e-06,
      "loss": 1.7449,
      "step": 123
    },
    {
      "epoch": 0.5040650406504065,
      "grad_norm": 0.46035370230674744,
      "learning_rate": 5e-06,
      "loss": 1.6939,
      "step": 124
    },
    {
      "epoch": 0.508130081300813,
      "grad_norm": 0.4211556017398834,
      "learning_rate": 4.959349593495935e-06,
      "loss": 1.6919,
      "step": 125
    },
    {
      "epoch": 0.5121951219512195,
      "grad_norm": 0.40321481227874756,
      "learning_rate": 4.918699186991871e-06,
      "loss": 1.727,
      "step": 126
    },
    {
      "epoch": 0.516260162601626,
      "grad_norm": 0.45469698309898376,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 1.6792,
      "step": 127
    },
    {
      "epoch": 0.5203252032520326,
      "grad_norm": 0.4928475022315979,
      "learning_rate": 4.83739837398374e-06,
      "loss": 1.713,
      "step": 128
    },
    {
      "epoch": 0.524390243902439,
      "grad_norm": 0.3949720561504364,
      "learning_rate": 4.796747967479675e-06,
      "loss": 1.7213,
      "step": 129
    },
    {
      "epoch": 0.5284552845528455,
      "grad_norm": 0.6022332906723022,
      "learning_rate": 4.75609756097561e-06,
      "loss": 1.6646,
      "step": 130
    },
    {
      "epoch": 0.532520325203252,
      "grad_norm": 0.4028625786304474,
      "learning_rate": 4.715447154471545e-06,
      "loss": 1.6939,
      "step": 131
    },
    {
      "epoch": 0.5365853658536586,
      "grad_norm": 0.43009862303733826,
      "learning_rate": 4.67479674796748e-06,
      "loss": 1.6919,
      "step": 132
    },
    {
      "epoch": 0.540650406504065,
      "grad_norm": 0.4282652735710144,
      "learning_rate": 4.634146341463416e-06,
      "loss": 1.7312,
      "step": 133
    },
    {
      "epoch": 0.5447154471544715,
      "grad_norm": 0.4238031506538391,
      "learning_rate": 4.59349593495935e-06,
      "loss": 1.6688,
      "step": 134
    },
    {
      "epoch": 0.5487804878048781,
      "grad_norm": 0.474398136138916,
      "learning_rate": 4.552845528455285e-06,
      "loss": 1.7131,
      "step": 135
    },
    {
      "epoch": 0.5528455284552846,
      "grad_norm": 0.4145808219909668,
      "learning_rate": 4.51219512195122e-06,
      "loss": 1.7134,
      "step": 136
    },
    {
      "epoch": 0.556910569105691,
      "grad_norm": 0.5533434748649597,
      "learning_rate": 4.471544715447155e-06,
      "loss": 1.7191,
      "step": 137
    },
    {
      "epoch": 0.5609756097560976,
      "grad_norm": 0.4589686095714569,
      "learning_rate": 4.43089430894309e-06,
      "loss": 1.6795,
      "step": 138
    },
    {
      "epoch": 0.5650406504065041,
      "grad_norm": 0.42821556329727173,
      "learning_rate": 4.390243902439025e-06,
      "loss": 1.7132,
      "step": 139
    },
    {
      "epoch": 0.5691056910569106,
      "grad_norm": 0.5176672339439392,
      "learning_rate": 4.34959349593496e-06,
      "loss": 1.6831,
      "step": 140
    },
    {
      "epoch": 0.573170731707317,
      "grad_norm": 0.45410770177841187,
      "learning_rate": 4.308943089430894e-06,
      "loss": 1.6613,
      "step": 141
    },
    {
      "epoch": 0.5772357723577236,
      "grad_norm": 0.4067547023296356,
      "learning_rate": 4.268292682926829e-06,
      "loss": 1.706,
      "step": 142
    },
    {
      "epoch": 0.5813008130081301,
      "grad_norm": 0.4407687187194824,
      "learning_rate": 4.227642276422765e-06,
      "loss": 1.7168,
      "step": 143
    },
    {
      "epoch": 0.5853658536585366,
      "grad_norm": 0.4558040201663971,
      "learning_rate": 4.1869918699186995e-06,
      "loss": 1.6783,
      "step": 144
    },
    {
      "epoch": 0.5894308943089431,
      "grad_norm": 0.47496524453163147,
      "learning_rate": 4.146341463414634e-06,
      "loss": 1.6927,
      "step": 145
    },
    {
      "epoch": 0.5934959349593496,
      "grad_norm": 0.4712445139884949,
      "learning_rate": 4.10569105691057e-06,
      "loss": 1.7105,
      "step": 146
    },
    {
      "epoch": 0.5975609756097561,
      "grad_norm": 0.527227520942688,
      "learning_rate": 4.0650406504065046e-06,
      "loss": 1.7476,
      "step": 147
    },
    {
      "epoch": 0.6016260162601627,
      "grad_norm": 0.5302590131759644,
      "learning_rate": 4.024390243902439e-06,
      "loss": 1.7037,
      "step": 148
    },
    {
      "epoch": 0.6056910569105691,
      "grad_norm": 0.4613666534423828,
      "learning_rate": 3.983739837398374e-06,
      "loss": 1.7034,
      "step": 149
    },
    {
      "epoch": 0.6097560975609756,
      "grad_norm": 0.43386778235435486,
      "learning_rate": 3.943089430894309e-06,
      "loss": 1.7165,
      "step": 150
    },
    {
      "epoch": 0.6138211382113821,
      "grad_norm": 0.4549228847026825,
      "learning_rate": 3.902439024390244e-06,
      "loss": 1.7262,
      "step": 151
    },
    {
      "epoch": 0.6178861788617886,
      "grad_norm": 0.4825079143047333,
      "learning_rate": 3.861788617886179e-06,
      "loss": 1.6685,
      "step": 152
    },
    {
      "epoch": 0.6219512195121951,
      "grad_norm": 0.45418936014175415,
      "learning_rate": 3.821138211382115e-06,
      "loss": 1.6906,
      "step": 153
    },
    {
      "epoch": 0.6260162601626016,
      "grad_norm": 0.47858789563179016,
      "learning_rate": 3.780487804878049e-06,
      "loss": 1.6736,
      "step": 154
    },
    {
      "epoch": 0.6300813008130082,
      "grad_norm": 0.4218636751174927,
      "learning_rate": 3.7398373983739838e-06,
      "loss": 1.6743,
      "step": 155
    },
    {
      "epoch": 0.6341463414634146,
      "grad_norm": 0.42544764280319214,
      "learning_rate": 3.699186991869919e-06,
      "loss": 1.6787,
      "step": 156
    },
    {
      "epoch": 0.6382113821138211,
      "grad_norm": 0.4026515781879425,
      "learning_rate": 3.6585365853658537e-06,
      "loss": 1.7407,
      "step": 157
    },
    {
      "epoch": 0.6422764227642277,
      "grad_norm": 0.5130017399787903,
      "learning_rate": 3.6178861788617893e-06,
      "loss": 1.6421,
      "step": 158
    },
    {
      "epoch": 0.6463414634146342,
      "grad_norm": 0.4023183584213257,
      "learning_rate": 3.577235772357724e-06,
      "loss": 1.6691,
      "step": 159
    },
    {
      "epoch": 0.6504065040650406,
      "grad_norm": 0.4857276678085327,
      "learning_rate": 3.5365853658536588e-06,
      "loss": 1.7019,
      "step": 160
    },
    {
      "epoch": 0.6544715447154471,
      "grad_norm": 0.4183124601840973,
      "learning_rate": 3.495934959349594e-06,
      "loss": 1.6345,
      "step": 161
    },
    {
      "epoch": 0.6585365853658537,
      "grad_norm": 0.5401263236999512,
      "learning_rate": 3.4552845528455287e-06,
      "loss": 1.6823,
      "step": 162
    },
    {
      "epoch": 0.6626016260162602,
      "grad_norm": 0.4167156219482422,
      "learning_rate": 3.414634146341464e-06,
      "loss": 1.6945,
      "step": 163
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.5526067018508911,
      "learning_rate": 3.3739837398373986e-06,
      "loss": 1.69,
      "step": 164
    },
    {
      "epoch": 0.6707317073170732,
      "grad_norm": 0.43190276622772217,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.6771,
      "step": 165
    },
    {
      "epoch": 0.6747967479674797,
      "grad_norm": 0.48610788583755493,
      "learning_rate": 3.292682926829269e-06,
      "loss": 1.6998,
      "step": 166
    },
    {
      "epoch": 0.6788617886178862,
      "grad_norm": 0.4015091359615326,
      "learning_rate": 3.2520325203252037e-06,
      "loss": 1.6238,
      "step": 167
    },
    {
      "epoch": 0.6829268292682927,
      "grad_norm": 0.40037909150123596,
      "learning_rate": 3.211382113821139e-06,
      "loss": 1.6885,
      "step": 168
    },
    {
      "epoch": 0.6869918699186992,
      "grad_norm": 0.4132348299026489,
      "learning_rate": 3.1707317073170736e-06,
      "loss": 1.6955,
      "step": 169
    },
    {
      "epoch": 0.6910569105691057,
      "grad_norm": 0.5056420564651489,
      "learning_rate": 3.1300813008130083e-06,
      "loss": 1.6426,
      "step": 170
    },
    {
      "epoch": 0.6951219512195121,
      "grad_norm": 0.4648841619491577,
      "learning_rate": 3.0894308943089435e-06,
      "loss": 1.66,
      "step": 171
    },
    {
      "epoch": 0.6991869918699187,
      "grad_norm": 0.4295191168785095,
      "learning_rate": 3.0487804878048782e-06,
      "loss": 1.5901,
      "step": 172
    },
    {
      "epoch": 0.7032520325203252,
      "grad_norm": 0.42674049735069275,
      "learning_rate": 3.0081300813008134e-06,
      "loss": 1.6857,
      "step": 173
    },
    {
      "epoch": 0.7073170731707317,
      "grad_norm": 0.40749216079711914,
      "learning_rate": 2.967479674796748e-06,
      "loss": 1.6882,
      "step": 174
    },
    {
      "epoch": 0.7113821138211383,
      "grad_norm": 0.4567708969116211,
      "learning_rate": 2.926829268292683e-06,
      "loss": 1.6784,
      "step": 175
    },
    {
      "epoch": 0.7154471544715447,
      "grad_norm": 0.5232097506523132,
      "learning_rate": 2.8861788617886185e-06,
      "loss": 1.6867,
      "step": 176
    },
    {
      "epoch": 0.7195121951219512,
      "grad_norm": 0.4262886047363281,
      "learning_rate": 2.845528455284553e-06,
      "loss": 1.6745,
      "step": 177
    },
    {
      "epoch": 0.7235772357723578,
      "grad_norm": 0.4635554850101471,
      "learning_rate": 2.8048780487804884e-06,
      "loss": 1.6693,
      "step": 178
    },
    {
      "epoch": 0.7276422764227642,
      "grad_norm": 0.4350730776786804,
      "learning_rate": 2.764227642276423e-06,
      "loss": 1.6796,
      "step": 179
    },
    {
      "epoch": 0.7317073170731707,
      "grad_norm": 0.4890363812446594,
      "learning_rate": 2.723577235772358e-06,
      "loss": 1.6825,
      "step": 180
    },
    {
      "epoch": 0.7357723577235772,
      "grad_norm": 0.42314425110816956,
      "learning_rate": 2.682926829268293e-06,
      "loss": 1.654,
      "step": 181
    },
    {
      "epoch": 0.7398373983739838,
      "grad_norm": 0.4569300413131714,
      "learning_rate": 2.6422764227642278e-06,
      "loss": 1.6615,
      "step": 182
    },
    {
      "epoch": 0.7439024390243902,
      "grad_norm": 0.5300145149230957,
      "learning_rate": 2.601626016260163e-06,
      "loss": 1.6732,
      "step": 183
    },
    {
      "epoch": 0.7479674796747967,
      "grad_norm": 0.4384273886680603,
      "learning_rate": 2.5609756097560977e-06,
      "loss": 1.6822,
      "step": 184
    },
    {
      "epoch": 0.7520325203252033,
      "grad_norm": 0.40823447704315186,
      "learning_rate": 2.5203252032520324e-06,
      "loss": 1.6947,
      "step": 185
    },
    {
      "epoch": 0.7560975609756098,
      "grad_norm": 0.44767969846725464,
      "learning_rate": 2.4796747967479676e-06,
      "loss": 1.7653,
      "step": 186
    },
    {
      "epoch": 0.7601626016260162,
      "grad_norm": 0.42057687044143677,
      "learning_rate": 2.4390243902439027e-06,
      "loss": 1.6582,
      "step": 187
    },
    {
      "epoch": 0.7642276422764228,
      "grad_norm": 0.6323032379150391,
      "learning_rate": 2.3983739837398375e-06,
      "loss": 1.6377,
      "step": 188
    },
    {
      "epoch": 0.7682926829268293,
      "grad_norm": 0.46569883823394775,
      "learning_rate": 2.3577235772357727e-06,
      "loss": 1.6864,
      "step": 189
    },
    {
      "epoch": 0.7723577235772358,
      "grad_norm": 0.4475976228713989,
      "learning_rate": 2.317073170731708e-06,
      "loss": 1.6995,
      "step": 190
    },
    {
      "epoch": 0.7764227642276422,
      "grad_norm": 0.42551514506340027,
      "learning_rate": 2.2764227642276426e-06,
      "loss": 1.6745,
      "step": 191
    },
    {
      "epoch": 0.7804878048780488,
      "grad_norm": 0.4114595055580139,
      "learning_rate": 2.2357723577235773e-06,
      "loss": 1.6414,
      "step": 192
    },
    {
      "epoch": 0.7845528455284553,
      "grad_norm": 0.4425772726535797,
      "learning_rate": 2.1951219512195125e-06,
      "loss": 1.668,
      "step": 193
    },
    {
      "epoch": 0.7886178861788617,
      "grad_norm": 0.4178754985332489,
      "learning_rate": 2.154471544715447e-06,
      "loss": 1.6172,
      "step": 194
    },
    {
      "epoch": 0.7926829268292683,
      "grad_norm": 0.526268482208252,
      "learning_rate": 2.1138211382113824e-06,
      "loss": 1.6877,
      "step": 195
    },
    {
      "epoch": 0.7967479674796748,
      "grad_norm": 0.46816593408584595,
      "learning_rate": 2.073170731707317e-06,
      "loss": 1.6405,
      "step": 196
    },
    {
      "epoch": 0.8008130081300813,
      "grad_norm": 0.42700088024139404,
      "learning_rate": 2.0325203252032523e-06,
      "loss": 1.6716,
      "step": 197
    },
    {
      "epoch": 0.8048780487804879,
      "grad_norm": 0.46738067269325256,
      "learning_rate": 1.991869918699187e-06,
      "loss": 1.6887,
      "step": 198
    },
    {
      "epoch": 0.8089430894308943,
      "grad_norm": 0.4532390534877777,
      "learning_rate": 1.951219512195122e-06,
      "loss": 1.6998,
      "step": 199
    },
    {
      "epoch": 0.8130081300813008,
      "grad_norm": 0.42802944779396057,
      "learning_rate": 1.9105691056910574e-06,
      "loss": 1.6087,
      "step": 200
    },
    {
      "epoch": 0.8170731707317073,
      "grad_norm": 0.547961413860321,
      "learning_rate": 1.8699186991869919e-06,
      "loss": 1.6467,
      "step": 201
    },
    {
      "epoch": 0.8211382113821138,
      "grad_norm": 0.4223916530609131,
      "learning_rate": 1.8292682926829268e-06,
      "loss": 1.6578,
      "step": 202
    },
    {
      "epoch": 0.8252032520325203,
      "grad_norm": 0.5189731121063232,
      "learning_rate": 1.788617886178862e-06,
      "loss": 1.7209,
      "step": 203
    },
    {
      "epoch": 0.8292682926829268,
      "grad_norm": 0.45684292912483215,
      "learning_rate": 1.747967479674797e-06,
      "loss": 1.6436,
      "step": 204
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.4165087640285492,
      "learning_rate": 1.707317073170732e-06,
      "loss": 1.5974,
      "step": 205
    },
    {
      "epoch": 0.8373983739837398,
      "grad_norm": 0.4212961196899414,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.6543,
      "step": 206
    },
    {
      "epoch": 0.8414634146341463,
      "grad_norm": 0.43846744298934937,
      "learning_rate": 1.6260162601626018e-06,
      "loss": 1.6136,
      "step": 207
    },
    {
      "epoch": 0.8455284552845529,
      "grad_norm": 0.6994388699531555,
      "learning_rate": 1.5853658536585368e-06,
      "loss": 1.7406,
      "step": 208
    },
    {
      "epoch": 0.8495934959349594,
      "grad_norm": 0.5141953825950623,
      "learning_rate": 1.5447154471544717e-06,
      "loss": 1.6644,
      "step": 209
    },
    {
      "epoch": 0.8536585365853658,
      "grad_norm": 0.49255314469337463,
      "learning_rate": 1.5040650406504067e-06,
      "loss": 1.6755,
      "step": 210
    },
    {
      "epoch": 0.8577235772357723,
      "grad_norm": 0.5186207890510559,
      "learning_rate": 1.4634146341463414e-06,
      "loss": 1.5835,
      "step": 211
    },
    {
      "epoch": 0.8617886178861789,
      "grad_norm": 0.41615086793899536,
      "learning_rate": 1.4227642276422766e-06,
      "loss": 1.6537,
      "step": 212
    },
    {
      "epoch": 0.8658536585365854,
      "grad_norm": 0.5542620420455933,
      "learning_rate": 1.3821138211382116e-06,
      "loss": 1.642,
      "step": 213
    },
    {
      "epoch": 0.8699186991869918,
      "grad_norm": 0.4988357424736023,
      "learning_rate": 1.3414634146341465e-06,
      "loss": 1.621,
      "step": 214
    },
    {
      "epoch": 0.8739837398373984,
      "grad_norm": 0.3991510272026062,
      "learning_rate": 1.3008130081300815e-06,
      "loss": 1.6462,
      "step": 215
    },
    {
      "epoch": 0.8780487804878049,
      "grad_norm": 0.3977624475955963,
      "learning_rate": 1.2601626016260162e-06,
      "loss": 1.6328,
      "step": 216
    },
    {
      "epoch": 0.8821138211382114,
      "grad_norm": 0.4829718768596649,
      "learning_rate": 1.2195121951219514e-06,
      "loss": 1.6878,
      "step": 217
    },
    {
      "epoch": 0.8861788617886179,
      "grad_norm": 0.5087659358978271,
      "learning_rate": 1.1788617886178863e-06,
      "loss": 1.6832,
      "step": 218
    },
    {
      "epoch": 0.8902439024390244,
      "grad_norm": 0.4450823664665222,
      "learning_rate": 1.1382113821138213e-06,
      "loss": 1.6896,
      "step": 219
    },
    {
      "epoch": 0.8943089430894309,
      "grad_norm": 0.5218344926834106,
      "learning_rate": 1.0975609756097562e-06,
      "loss": 1.6422,
      "step": 220
    },
    {
      "epoch": 0.8983739837398373,
      "grad_norm": 0.43621551990509033,
      "learning_rate": 1.0569105691056912e-06,
      "loss": 1.6496,
      "step": 221
    },
    {
      "epoch": 0.9024390243902439,
      "grad_norm": 0.5574150085449219,
      "learning_rate": 1.0162601626016261e-06,
      "loss": 1.6791,
      "step": 222
    },
    {
      "epoch": 0.9065040650406504,
      "grad_norm": 0.47324275970458984,
      "learning_rate": 9.75609756097561e-07,
      "loss": 1.6603,
      "step": 223
    },
    {
      "epoch": 0.9105691056910569,
      "grad_norm": 0.5460381507873535,
      "learning_rate": 9.349593495934959e-07,
      "loss": 1.6776,
      "step": 224
    },
    {
      "epoch": 0.9146341463414634,
      "grad_norm": 0.506424069404602,
      "learning_rate": 8.94308943089431e-07,
      "loss": 1.6737,
      "step": 225
    },
    {
      "epoch": 0.9186991869918699,
      "grad_norm": 0.48858654499053955,
      "learning_rate": 8.53658536585366e-07,
      "loss": 1.6779,
      "step": 226
    },
    {
      "epoch": 0.9227642276422764,
      "grad_norm": 0.43115416169166565,
      "learning_rate": 8.130081300813009e-07,
      "loss": 1.6291,
      "step": 227
    },
    {
      "epoch": 0.926829268292683,
      "grad_norm": 0.5290559530258179,
      "learning_rate": 7.723577235772359e-07,
      "loss": 1.6664,
      "step": 228
    },
    {
      "epoch": 0.9308943089430894,
      "grad_norm": 0.3979451358318329,
      "learning_rate": 7.317073170731707e-07,
      "loss": 1.6533,
      "step": 229
    },
    {
      "epoch": 0.9349593495934959,
      "grad_norm": 0.437351256608963,
      "learning_rate": 6.910569105691058e-07,
      "loss": 1.6444,
      "step": 230
    },
    {
      "epoch": 0.9390243902439024,
      "grad_norm": 0.41036128997802734,
      "learning_rate": 6.504065040650407e-07,
      "loss": 1.653,
      "step": 231
    },
    {
      "epoch": 0.943089430894309,
      "grad_norm": 0.47654396295547485,
      "learning_rate": 6.097560975609757e-07,
      "loss": 1.6934,
      "step": 232
    },
    {
      "epoch": 0.9471544715447154,
      "grad_norm": 0.467517614364624,
      "learning_rate": 5.691056910569106e-07,
      "loss": 1.6268,
      "step": 233
    },
    {
      "epoch": 0.9512195121951219,
      "grad_norm": 0.44289228320121765,
      "learning_rate": 5.284552845528456e-07,
      "loss": 1.5799,
      "step": 234
    },
    {
      "epoch": 0.9552845528455285,
      "grad_norm": 0.4003511369228363,
      "learning_rate": 4.878048780487805e-07,
      "loss": 1.6795,
      "step": 235
    },
    {
      "epoch": 0.959349593495935,
      "grad_norm": 0.39896637201309204,
      "learning_rate": 4.471544715447155e-07,
      "loss": 1.6185,
      "step": 236
    },
    {
      "epoch": 0.9634146341463414,
      "grad_norm": 0.4294638931751251,
      "learning_rate": 4.0650406504065046e-07,
      "loss": 1.6522,
      "step": 237
    },
    {
      "epoch": 0.967479674796748,
      "grad_norm": 0.46294713020324707,
      "learning_rate": 3.6585365853658536e-07,
      "loss": 1.6313,
      "step": 238
    },
    {
      "epoch": 0.9715447154471545,
      "grad_norm": 0.43310508131980896,
      "learning_rate": 3.2520325203252037e-07,
      "loss": 1.6483,
      "step": 239
    },
    {
      "epoch": 0.975609756097561,
      "grad_norm": 0.46582019329071045,
      "learning_rate": 2.845528455284553e-07,
      "loss": 1.6394,
      "step": 240
    },
    {
      "epoch": 0.9796747967479674,
      "grad_norm": 0.44841283559799194,
      "learning_rate": 2.439024390243903e-07,
      "loss": 1.6587,
      "step": 241
    },
    {
      "epoch": 0.983739837398374,
      "grad_norm": 0.4253198802471161,
      "learning_rate": 2.0325203252032523e-07,
      "loss": 1.6218,
      "step": 242
    },
    {
      "epoch": 0.9878048780487805,
      "grad_norm": 0.402408242225647,
      "learning_rate": 1.6260162601626018e-07,
      "loss": 1.6548,
      "step": 243
    },
    {
      "epoch": 0.991869918699187,
      "grad_norm": 0.3985183835029602,
      "learning_rate": 1.2195121951219514e-07,
      "loss": 1.6401,
      "step": 244
    },
    {
      "epoch": 0.9959349593495935,
      "grad_norm": 0.5187343955039978,
      "learning_rate": 8.130081300813009e-08,
      "loss": 1.6755,
      "step": 245
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.4178217053413391,
      "learning_rate": 4.0650406504065046e-08,
      "loss": 1.5977,
      "step": 246
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 246,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0971527577075712e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}