|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.98971898560658,
  "eval_steps": 500,
  "global_step": 910,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0054832076764907475, "grad_norm": 604.3109811491834, "learning_rate": 8.791208791208792e-07, "loss": 3.4565, "step": 1 },
    { "epoch": 0.010966415352981495, "grad_norm": 602.1736348957847, "learning_rate": 1.7582417582417585e-06, "loss": 3.445, "step": 2 },
    { "epoch": 0.01644962302947224, "grad_norm": 572.1582926794415, "learning_rate": 2.6373626373626375e-06, "loss": 3.426, "step": 3 },
    { "epoch": 0.02193283070596299, "grad_norm": 301.9672730610099, "learning_rate": 3.516483516483517e-06, "loss": 3.2554, "step": 4 },
    { "epoch": 0.027416038382453736, "grad_norm": 157.4890153555457, "learning_rate": 4.395604395604396e-06, "loss": 2.9123, "step": 5 },
    { "epoch": 0.03289924605894448, "grad_norm": 93.35477967830393, "learning_rate": 5.274725274725275e-06, "loss": 2.6655, "step": 6 },
    { "epoch": 0.03838245373543523, "grad_norm": 23.960002925273315, "learning_rate": 6.153846153846155e-06, "loss": 2.526, "step": 7 },
    { "epoch": 0.04386566141192598, "grad_norm": 16.617925535315912, "learning_rate": 7.032967032967034e-06, "loss": 2.3244, "step": 8 },
    { "epoch": 0.04934886908841672, "grad_norm": 13.65445534016627, "learning_rate": 7.912087912087913e-06, "loss": 2.2167, "step": 9 },
    { "epoch": 0.05483207676490747, "grad_norm": 23.60213319182947, "learning_rate": 8.791208791208792e-06, "loss": 2.1214, "step": 10 },
    { "epoch": 0.06031528444139822, "grad_norm": 9.750741286283983, "learning_rate": 9.670329670329671e-06, "loss": 2.0631, "step": 11 },
    { "epoch": 0.06579849211788896, "grad_norm": 13.69478328276252, "learning_rate": 1.054945054945055e-05, "loss": 2.0035, "step": 12 },
    { "epoch": 0.07128169979437972, "grad_norm": 8.863133767549522, "learning_rate": 1.1428571428571429e-05, "loss": 1.9018, "step": 13 },
    { "epoch": 0.07676490747087046, "grad_norm": 6.184273756096322, "learning_rate": 1.230769230769231e-05, "loss": 1.8409, "step": 14 },
    { "epoch": 0.0822481151473612, "grad_norm": 8.463726907785716, "learning_rate": 1.3186813186813187e-05, "loss": 1.7862, "step": 15 },
    { "epoch": 0.08773132282385196, "grad_norm": 5.9606779223424455, "learning_rate": 1.4065934065934068e-05, "loss": 1.7449, "step": 16 },
    { "epoch": 0.0932145305003427, "grad_norm": 18.546012018929744, "learning_rate": 1.4945054945054947e-05, "loss": 1.7071, "step": 17 },
    { "epoch": 0.09869773817683344, "grad_norm": 9.23610239091234, "learning_rate": 1.5824175824175826e-05, "loss": 1.7254, "step": 18 },
    { "epoch": 0.1041809458533242, "grad_norm": 44.47081850643636, "learning_rate": 1.6703296703296707e-05, "loss": 1.6708, "step": 19 },
    { "epoch": 0.10966415352981494, "grad_norm": 29.350706185079904, "learning_rate": 1.7582417582417584e-05, "loss": 1.7401, "step": 20 },
    { "epoch": 0.11514736120630568, "grad_norm": 7.168404792591547, "learning_rate": 1.8461538461538465e-05, "loss": 1.6881, "step": 21 },
    { "epoch": 0.12063056888279644, "grad_norm": 28.75909337885957, "learning_rate": 1.9340659340659342e-05, "loss": 1.5999, "step": 22 },
    { "epoch": 0.12611377655928718, "grad_norm": 12.270722231807452, "learning_rate": 2.021978021978022e-05, "loss": 1.7174, "step": 23 },
    { "epoch": 0.13159698423577793, "grad_norm": 51.182199832000634, "learning_rate": 2.10989010989011e-05, "loss": 1.6164, "step": 24 },
    { "epoch": 0.13708019191226867, "grad_norm": 5.983294276939527, "learning_rate": 2.197802197802198e-05, "loss": 1.6279, "step": 25 },
    { "epoch": 0.14256339958875944, "grad_norm": 12.36006380527371, "learning_rate": 2.2857142857142858e-05, "loss": 1.5335, "step": 26 },
    { "epoch": 0.14804660726525018, "grad_norm": 3.912477135556689, "learning_rate": 2.373626373626374e-05, "loss": 1.5637, "step": 27 },
    { "epoch": 0.15352981494174092, "grad_norm": 8.798850308211108, "learning_rate": 2.461538461538462e-05, "loss": 1.546, "step": 28 },
    { "epoch": 0.15901302261823166, "grad_norm": 6.133566343370423, "learning_rate": 2.5494505494505493e-05, "loss": 1.5493, "step": 29 },
    { "epoch": 0.1644962302947224, "grad_norm": 3.0393729351601753, "learning_rate": 2.6373626373626374e-05, "loss": 1.4988, "step": 30 },
    { "epoch": 0.16997943797121315, "grad_norm": 28.575328415461772, "learning_rate": 2.7252747252747255e-05, "loss": 1.5026, "step": 31 },
    { "epoch": 0.17546264564770392, "grad_norm": 27.101171796307156, "learning_rate": 2.8131868131868136e-05, "loss": 1.6522, "step": 32 },
    { "epoch": 0.18094585332419466, "grad_norm": 7.286329527124996, "learning_rate": 2.9010989010989013e-05, "loss": 1.6616, "step": 33 },
    { "epoch": 0.1864290610006854, "grad_norm": 4.786509337900977, "learning_rate": 2.9890109890109894e-05, "loss": 1.495, "step": 34 },
    { "epoch": 0.19191226867717615, "grad_norm": 2.804143471750863, "learning_rate": 3.0769230769230774e-05, "loss": 1.4495, "step": 35 },
    { "epoch": 0.1973954763536669, "grad_norm": 9.304401113296121, "learning_rate": 3.164835164835165e-05, "loss": 1.4403, "step": 36 },
    { "epoch": 0.20287868403015763, "grad_norm": 5.457009814887685, "learning_rate": 3.252747252747253e-05, "loss": 1.4336, "step": 37 },
    { "epoch": 0.2083618917066484, "grad_norm": 3.7957565393477437, "learning_rate": 3.340659340659341e-05, "loss": 1.4243, "step": 38 },
    { "epoch": 0.21384509938313914, "grad_norm": 4.9220861710974075, "learning_rate": 3.4285714285714284e-05, "loss": 1.3984, "step": 39 },
    { "epoch": 0.21932830705962988, "grad_norm": 4.736412206172175, "learning_rate": 3.516483516483517e-05, "loss": 1.3668, "step": 40 },
    { "epoch": 0.22481151473612063, "grad_norm": 5.581594123475227, "learning_rate": 3.6043956043956045e-05, "loss": 1.3513, "step": 41 },
    { "epoch": 0.23029472241261137, "grad_norm": 3.5585171099348427, "learning_rate": 3.692307692307693e-05, "loss": 1.3557, "step": 42 },
    { "epoch": 0.2357779300891021, "grad_norm": 5.217567824661255, "learning_rate": 3.7802197802197807e-05, "loss": 1.3368, "step": 43 },
    { "epoch": 0.24126113776559288, "grad_norm": 3.8941164699166784, "learning_rate": 3.8681318681318684e-05, "loss": 1.3192, "step": 44 },
    { "epoch": 0.24674434544208362, "grad_norm": 3.3715823234615345, "learning_rate": 3.956043956043957e-05, "loss": 1.3194, "step": 45 },
    { "epoch": 0.25222755311857437, "grad_norm": 4.071045479574351, "learning_rate": 4.043956043956044e-05, "loss": 1.3, "step": 46 },
    { "epoch": 0.2577107607950651, "grad_norm": 4.790439556324944, "learning_rate": 4.131868131868133e-05, "loss": 1.3051, "step": 47 },
    { "epoch": 0.26319396847155585, "grad_norm": 3.6760783714364793, "learning_rate": 4.21978021978022e-05, "loss": 1.2885, "step": 48 },
    { "epoch": 0.2686771761480466, "grad_norm": 3.19497049785778, "learning_rate": 4.307692307692308e-05, "loss": 1.2962, "step": 49 },
    { "epoch": 0.27416038382453733, "grad_norm": 2.620333577151409, "learning_rate": 4.395604395604396e-05, "loss": 1.2452, "step": 50 },
    { "epoch": 0.2796435915010281, "grad_norm": 3.573140694736915, "learning_rate": 4.483516483516484e-05, "loss": 1.2912, "step": 51 },
    { "epoch": 0.2851267991775189, "grad_norm": 2.3723854257821513, "learning_rate": 4.5714285714285716e-05, "loss": 1.2468, "step": 52 },
    { "epoch": 0.2906100068540096, "grad_norm": 2.5628938638391277, "learning_rate": 4.65934065934066e-05, "loss": 1.2883, "step": 53 },
    { "epoch": 0.29609321453050036, "grad_norm": 3.2477927500052184, "learning_rate": 4.747252747252748e-05, "loss": 1.2864, "step": 54 },
    { "epoch": 0.3015764222069911, "grad_norm": 2.4110262583540716, "learning_rate": 4.8351648351648355e-05, "loss": 1.2886, "step": 55 },
    { "epoch": 0.30705962988348184, "grad_norm": 3.9054257622250614, "learning_rate": 4.923076923076924e-05, "loss": 1.2544, "step": 56 },
    { "epoch": 0.3125428375599726, "grad_norm": 2.684900989864461, "learning_rate": 5.0109890109890116e-05, "loss": 1.2506, "step": 57 },
    { "epoch": 0.31802604523646333, "grad_norm": 3.1294292463297673, "learning_rate": 5.098901098901099e-05, "loss": 1.2619, "step": 58 },
    { "epoch": 0.32350925291295407, "grad_norm": 4.302163409683541, "learning_rate": 5.186813186813188e-05, "loss": 1.2531, "step": 59 },
    { "epoch": 0.3289924605894448, "grad_norm": 2.3198593094902566, "learning_rate": 5.274725274725275e-05, "loss": 1.2444, "step": 60 },
    { "epoch": 0.33447566826593556, "grad_norm": 3.075723813655852, "learning_rate": 5.3626373626373626e-05, "loss": 1.2318, "step": 61 },
    { "epoch": 0.3399588759424263, "grad_norm": 2.751318001428128, "learning_rate": 5.450549450549451e-05, "loss": 1.2283, "step": 62 },
    { "epoch": 0.34544208361891704, "grad_norm": 3.792517967186425, "learning_rate": 5.538461538461539e-05, "loss": 1.2347, "step": 63 },
    { "epoch": 0.35092529129540784, "grad_norm": 2.502677006785022, "learning_rate": 5.626373626373627e-05, "loss": 1.2154, "step": 64 },
    { "epoch": 0.3564084989718986, "grad_norm": 2.4447089296161955, "learning_rate": 5.714285714285715e-05, "loss": 1.2062, "step": 65 },
    { "epoch": 0.3618917066483893, "grad_norm": 2.7612204760550183, "learning_rate": 5.8021978021978026e-05, "loss": 1.2105, "step": 66 },
    { "epoch": 0.36737491432488006, "grad_norm": 3.3730773809799968, "learning_rate": 5.890109890109891e-05, "loss": 1.2129, "step": 67 },
    { "epoch": 0.3728581220013708, "grad_norm": 2.067462539301751, "learning_rate": 5.978021978021979e-05, "loss": 1.2021, "step": 68 },
    { "epoch": 0.37834132967786155, "grad_norm": 3.367733224494582, "learning_rate": 6.0659340659340665e-05, "loss": 1.2207, "step": 69 },
    { "epoch": 0.3838245373543523, "grad_norm": 2.1443388252000335, "learning_rate": 6.153846153846155e-05, "loss": 1.2025, "step": 70 },
    { "epoch": 0.38930774503084303, "grad_norm": 3.4158140446082994, "learning_rate": 6.241758241758242e-05, "loss": 1.2132, "step": 71 },
    { "epoch": 0.3947909527073338, "grad_norm": 2.58088978525386, "learning_rate": 6.32967032967033e-05, "loss": 1.1843, "step": 72 },
    { "epoch": 0.4002741603838245, "grad_norm": 2.897001887626188, "learning_rate": 6.417582417582419e-05, "loss": 1.1996, "step": 73 },
    { "epoch": 0.40575736806031526, "grad_norm": 3.0910986197243573, "learning_rate": 6.505494505494506e-05, "loss": 1.215, "step": 74 },
    { "epoch": 0.41124057573680606, "grad_norm": 2.36902708485275, "learning_rate": 6.593406593406594e-05, "loss": 1.172, "step": 75 },
    { "epoch": 0.4167237834132968, "grad_norm": 2.9090585544359366, "learning_rate": 6.681318681318683e-05, "loss": 1.192, "step": 76 },
    { "epoch": 0.42220699108978754, "grad_norm": 2.682374090006958, "learning_rate": 6.76923076923077e-05, "loss": 1.1929, "step": 77 },
    { "epoch": 0.4276901987662783, "grad_norm": 2.6472629365834037, "learning_rate": 6.857142857142857e-05, "loss": 1.1578, "step": 78 },
    { "epoch": 0.433173406442769, "grad_norm": 2.58566062639449, "learning_rate": 6.945054945054945e-05, "loss": 1.153, "step": 79 },
    { "epoch": 0.43865661411925977, "grad_norm": 2.7406033885199848, "learning_rate": 7.032967032967034e-05, "loss": 1.1697, "step": 80 },
    { "epoch": 0.4441398217957505, "grad_norm": 2.137577114283556, "learning_rate": 7.12087912087912e-05, "loss": 1.1492, "step": 81 },
    { "epoch": 0.44962302947224125, "grad_norm": 2.9855017798950976, "learning_rate": 7.208791208791209e-05, "loss": 1.1584, "step": 82 },
    { "epoch": 0.455106237148732, "grad_norm": 2.220583427738456, "learning_rate": 7.296703296703297e-05, "loss": 1.1437, "step": 83 },
    { "epoch": 0.46058944482522274, "grad_norm": 2.706030331520761, "learning_rate": 7.384615384615386e-05, "loss": 1.1673, "step": 84 },
    { "epoch": 0.4660726525017135, "grad_norm": 3.190829729449872, "learning_rate": 7.472527472527473e-05, "loss": 1.1413, "step": 85 },
    { "epoch": 0.4715558601782042, "grad_norm": 1.9236806571413316, "learning_rate": 7.560439560439561e-05, "loss": 1.1546, "step": 86 },
    { "epoch": 0.477039067854695, "grad_norm": 2.9656722488411, "learning_rate": 7.64835164835165e-05, "loss": 1.1705, "step": 87 },
    { "epoch": 0.48252227553118576, "grad_norm": 1.7873438452480972, "learning_rate": 7.736263736263737e-05, "loss": 1.1512, "step": 88 },
    { "epoch": 0.4880054832076765, "grad_norm": 3.366153588378385, "learning_rate": 7.824175824175825e-05, "loss": 1.1854, "step": 89 },
    { "epoch": 0.49348869088416725, "grad_norm": 2.3979761080289306, "learning_rate": 7.912087912087914e-05, "loss": 1.1785, "step": 90 },
    { "epoch": 0.498971898560658, "grad_norm": 2.68813176726075, "learning_rate": 8e-05, "loss": 1.1689, "step": 91 },
    { "epoch": 0.5044551062371487, "grad_norm": 3.634421050069424, "learning_rate": 7.999970571955439e-05, "loss": 1.1534, "step": 92 },
    { "epoch": 0.5099383139136395, "grad_norm": 2.110037855631259, "learning_rate": 7.999882288254757e-05, "loss": 1.1398, "step": 93 },
    { "epoch": 0.5154215215901302, "grad_norm": 3.029801321990504, "learning_rate": 7.999735150196965e-05, "loss": 1.15, "step": 94 },
    { "epoch": 0.520904729266621, "grad_norm": 1.9878631276509169, "learning_rate": 7.999529159947053e-05, "loss": 1.1524, "step": 95 },
    { "epoch": 0.5263879369431117, "grad_norm": 2.997439251632094, "learning_rate": 7.999264320535968e-05, "loss": 1.161, "step": 96 },
    { "epoch": 0.5318711446196025, "grad_norm": 1.7997765525760119, "learning_rate": 7.998940635860564e-05, "loss": 1.1392, "step": 97 },
    { "epoch": 0.5373543522960932, "grad_norm": 2.3934821316457353, "learning_rate": 7.998558110683542e-05, "loss": 1.1511, "step": 98 },
    { "epoch": 0.542837559972584, "grad_norm": 2.394286604934712, "learning_rate": 7.998116750633388e-05, "loss": 1.1482, "step": 99 },
    { "epoch": 0.5483207676490747, "grad_norm": 2.5638379332850447, "learning_rate": 7.997616562204282e-05, "loss": 1.132, "step": 100 },
    { "epoch": 0.5538039753255655, "grad_norm": 2.4073701211461715, "learning_rate": 7.99705755275601e-05, "loss": 1.1653, "step": 101 },
    { "epoch": 0.5592871830020562, "grad_norm": 2.9908052478558127, "learning_rate": 7.996439730513846e-05, "loss": 1.1437, "step": 102 },
    { "epoch": 0.564770390678547, "grad_norm": 2.338649947963062, "learning_rate": 7.995763104568444e-05, "loss": 1.1257, "step": 103 },
    { "epoch": 0.5702535983550377, "grad_norm": 2.1324859396744853, "learning_rate": 7.99502768487569e-05, "loss": 1.1082, "step": 104 },
    { "epoch": 0.5757368060315284, "grad_norm": 1.8207695969892468, "learning_rate": 7.994233482256567e-05, "loss": 1.1324, "step": 105 },
    { "epoch": 0.5812200137080192, "grad_norm": 3.0671728842532793, "learning_rate": 7.993380508396992e-05, "loss": 1.1673, "step": 106 },
    { "epoch": 0.5867032213845099, "grad_norm": 1.2132079987970015, "learning_rate": 7.992468775847638e-05, "loss": 1.1332, "step": 107 },
    { "epoch": 0.5921864290610007, "grad_norm": 2.8722682327215647, "learning_rate": 7.99149829802376e-05, "loss": 1.1937, "step": 108 },
    { "epoch": 0.5976696367374914, "grad_norm": 1.578063060667415, "learning_rate": 7.990469089204992e-05, "loss": 1.1516, "step": 109 },
    { "epoch": 0.6031528444139822, "grad_norm": 2.4024809989588336, "learning_rate": 7.989381164535131e-05, "loss": 1.1776, "step": 110 },
    { "epoch": 0.6086360520904729, "grad_norm": 1.7795121351579433, "learning_rate": 7.988234540021928e-05, "loss": 1.153, "step": 111 },
    { "epoch": 0.6141192597669637, "grad_norm": 2.5507155124877032, "learning_rate": 7.987029232536841e-05, "loss": 1.1546, "step": 112 },
    { "epoch": 0.6196024674434544, "grad_norm": 2.7262135259336095, "learning_rate": 7.98576525981479e-05, "loss": 1.1681, "step": 113 },
    { "epoch": 0.6250856751199452, "grad_norm": 1.756472159467694, "learning_rate": 7.9844426404539e-05, "loss": 1.1337, "step": 114 },
    { "epoch": 0.630568882796436, "grad_norm": 3.664337440475356, "learning_rate": 7.983061393915222e-05, "loss": 1.1458, "step": 115 },
    { "epoch": 0.6360520904729267, "grad_norm": 1.951846935673031, "learning_rate": 7.981621540522444e-05, "loss": 1.1322, "step": 116 },
    { "epoch": 0.6415352981494175, "grad_norm": 1.7976750357904085, "learning_rate": 7.980123101461606e-05, "loss": 1.1263, "step": 117 },
    { "epoch": 0.6470185058259081, "grad_norm": 2.300277427581849, "learning_rate": 7.978566098780771e-05, "loss": 1.1413, "step": 118 },
    { "epoch": 0.6525017135023989, "grad_norm": 2.360648922071168, "learning_rate": 7.976950555389713e-05, "loss": 1.1274, "step": 119 },
    { "epoch": 0.6579849211788896, "grad_norm": 2.0913575786408147, "learning_rate": 7.97527649505957e-05, "loss": 1.1213, "step": 120 },
    { "epoch": 0.6634681288553804, "grad_norm": 2.022249517249403, "learning_rate": 7.973543942422506e-05, "loss": 1.1175, "step": 121 },
    { "epoch": 0.6689513365318711, "grad_norm": 2.2294222010405362, "learning_rate": 7.97175292297134e-05, "loss": 1.1281, "step": 122 },
    { "epoch": 0.6744345442083619, "grad_norm": 1.7133244378182648, "learning_rate": 7.969903463059169e-05, "loss": 1.1099, "step": 123 },
    { "epoch": 0.6799177518848526, "grad_norm": 2.0157523028333775, "learning_rate": 7.96799558989899e-05, "loss": 1.1175, "step": 124 },
    { "epoch": 0.6854009595613434, "grad_norm": 1.7297097607076724, "learning_rate": 7.966029331563287e-05, "loss": 1.1131, "step": 125 },
    { "epoch": 0.6908841672378341, "grad_norm": 2.2317660437246034, "learning_rate": 7.964004716983635e-05, "loss": 1.1066, "step": 126 },
    { "epoch": 0.6963673749143249, "grad_norm": 1.9857286499887847, "learning_rate": 7.961921775950254e-05, "loss": 1.1083, "step": 127 },
    { "epoch": 0.7018505825908157, "grad_norm": 1.6946992419248856, "learning_rate": 7.959780539111585e-05, "loss": 1.0985, "step": 128 },
    { "epoch": 0.7073337902673064, "grad_norm": 1.949379922441364, "learning_rate": 7.957581037973835e-05, "loss": 1.0792, "step": 129 },
    { "epoch": 0.7128169979437972, "grad_norm": 2.0374474232968454, "learning_rate": 7.955323304900514e-05, "loss": 1.1081, "step": 130 },
    { "epoch": 0.7183002056202878, "grad_norm": 1.5382297067387796, "learning_rate": 7.953007373111956e-05, "loss": 1.0894, "step": 131 },
    { "epoch": 0.7237834132967786, "grad_norm": 1.994423662926792, "learning_rate": 7.950633276684833e-05, "loss": 1.0971, "step": 132 },
    { "epoch": 0.7292666209732693, "grad_norm": 1.6956272845939566, "learning_rate": 7.948201050551651e-05, "loss": 1.078, "step": 133 },
    { "epoch": 0.7347498286497601, "grad_norm": 1.808121530050818, "learning_rate": 7.945710730500243e-05, "loss": 1.087, "step": 134 },
    { "epoch": 0.7402330363262508, "grad_norm": 1.6031163127588859, "learning_rate": 7.943162353173232e-05, "loss": 1.0878, "step": 135 },
    { "epoch": 0.7457162440027416, "grad_norm": 2.8069443037445057, "learning_rate": 7.940555956067495e-05, "loss": 1.0852, "step": 136 },
    { "epoch": 0.7511994516792323, "grad_norm": 1.023257465419051, "learning_rate": 7.937891577533624e-05, "loss": 1.0864, "step": 137 },
    { "epoch": 0.7566826593557231, "grad_norm": 3.7981840568943266, "learning_rate": 7.93516925677534e-05, "loss": 1.1577, "step": 138 },
    { "epoch": 0.7621658670322139, "grad_norm": 1.957367420792266, "learning_rate": 7.932389033848931e-05, "loss": 1.1428, "step": 139 },
    { "epoch": 0.7676490747087046, "grad_norm": 2.620391650383465, "learning_rate": 7.929550949662659e-05, "loss": 1.1262, "step": 140 },
    { "epoch": 0.7731322823851954, "grad_norm": 1.8055917441902372, "learning_rate": 7.92665504597616e-05, "loss": 1.1087, "step": 141 },
    { "epoch": 0.7786154900616861, "grad_norm": 1.5140320658198094, "learning_rate": 7.923701365399826e-05, "loss": 1.1206, "step": 142 },
    { "epoch": 0.7840986977381769, "grad_norm": 2.3311158871467983, "learning_rate": 7.920689951394175e-05, "loss": 1.1354, "step": 143 },
    { "epoch": 0.7895819054146676, "grad_norm": 2.871009250239358, "learning_rate": 7.917620848269224e-05, "loss": 1.1159, "step": 144 },
    { "epoch": 0.7950651130911583, "grad_norm": 1.174246832683915, "learning_rate": 7.914494101183822e-05, "loss": 1.112, "step": 145 },
    { "epoch": 0.800548320767649, "grad_norm": 1.9715656445082748, "learning_rate": 7.911309756144995e-05, "loss": 1.1233, "step": 146 },
    { "epoch": 0.8060315284441398, "grad_norm": 1.3667047276351785, "learning_rate": 7.908067860007268e-05, "loss": 1.0966, "step": 147 },
    { "epoch": 0.8115147361206305, "grad_norm": 1.8908076306855728, "learning_rate": 7.904768460471975e-05, "loss": 1.118, "step": 148 },
    { "epoch": 0.8169979437971213, "grad_norm": 1.6160590718366654, "learning_rate": 7.90141160608655e-05, "loss": 1.0836, "step": 149 },
    { "epoch": 0.8224811514736121, "grad_norm": 2.257542807740706, "learning_rate": 7.897997346243825e-05, "loss": 1.0985, "step": 150 },
    { "epoch": 0.8279643591501028, "grad_norm": 1.2533944841043951, "learning_rate": 7.894525731181297e-05, "loss": 1.0678, "step": 151 },
    { "epoch": 0.8334475668265936, "grad_norm": 2.4996679708615988, "learning_rate": 7.890996811980386e-05, "loss": 1.1143, "step": 152 },
    { "epoch": 0.8389307745030843, "grad_norm": 1.6215671222990462, "learning_rate": 7.887410640565689e-05, "loss": 1.0947, "step": 153 },
    { "epoch": 0.8444139821795751, "grad_norm": 1.4102568455851006, "learning_rate": 7.883767269704209e-05, "loss": 1.0635, "step": 154 },
    { "epoch": 0.8498971898560658, "grad_norm": 2.0199914067223763, "learning_rate": 7.880066753004588e-05, "loss": 1.1007, "step": 155 },
    { "epoch": 0.8553803975325566, "grad_norm": 1.4233619206252466, "learning_rate": 7.876309144916312e-05, "loss": 1.0964, "step": 156 },
    { "epoch": 0.8608636052090473, "grad_norm": 1.5315953952088068, "learning_rate": 7.87249450072891e-05, "loss": 1.1091, "step": 157 },
    { "epoch": 0.866346812885538, "grad_norm": 1.5672306259553663, "learning_rate": 7.86862287657114e-05, "loss": 1.0961, "step": 158 },
    { "epoch": 0.8718300205620287, "grad_norm": 1.722337979582118, "learning_rate": 7.864694329410168e-05, "loss": 1.1032, "step": 159 },
    { "epoch": 0.8773132282385195, "grad_norm": 1.1707259582896943, "learning_rate": 7.860708917050722e-05, "loss": 1.083, "step": 160 },
    { "epoch": 0.8827964359150103, "grad_norm": 2.1260596185722216, "learning_rate": 7.85666669813425e-05, "loss": 1.0775, "step": 161 },
    { "epoch": 0.888279643591501, "grad_norm": 1.1049918316810494, "learning_rate": 7.852567732138051e-05, "loss": 1.0804, "step": 162 },
    { "epoch": 0.8937628512679918, "grad_norm": 1.5314067038496302, "learning_rate": 7.848412079374403e-05, "loss": 1.0931, "step": 163 },
    { "epoch": 0.8992460589444825, "grad_norm": 1.526207291286965, "learning_rate": 7.844199800989672e-05, "loss": 1.126, "step": 164 },
    { "epoch": 0.9047292666209733, "grad_norm": 1.4889589302339163, "learning_rate": 7.839930958963415e-05, "loss": 1.0688, "step": 165 },
    { "epoch": 0.910212474297464, "grad_norm": 1.8224369715346593, "learning_rate": 7.835605616107471e-05, "loss": 1.0981, "step": 166 },
    { "epoch": 0.9156956819739548, "grad_norm": 1.9809507356149727, "learning_rate": 7.83122383606503e-05, "loss": 1.0716, "step": 167 },
    { "epoch": 0.9211788896504455, "grad_norm": 1.319128673805323, "learning_rate": 7.826785683309702e-05, "loss": 1.0511, "step": 168 },
    { "epoch": 0.9266620973269363, "grad_norm": 1.7628250921107829, "learning_rate": 7.822291223144564e-05, "loss": 1.0815, "step": 169 },
    { "epoch": 0.932145305003427, "grad_norm": 1.5619673215880712, "learning_rate": 7.817740521701204e-05, "loss": 1.0851, "step": 170 },
    { "epoch": 0.9376285126799178, "grad_norm": 1.6877201996768514, "learning_rate": 7.813133645938744e-05, "loss": 1.0655, "step": 171 },
    { "epoch": 0.9431117203564084, "grad_norm": 1.4453595064062965, "learning_rate": 7.808470663642856e-05, "loss": 1.0468, "step": 172 },
    { "epoch": 0.9485949280328992, "grad_norm": 1.3988144950022152, "learning_rate": 7.803751643424769e-05, "loss": 1.0684, "step": 173 },
    { "epoch": 0.95407813570939, "grad_norm": 1.829632986890131, "learning_rate": 7.798976654720248e-05, "loss": 1.0658, "step": 174 },
    { "epoch": 0.9595613433858807, "grad_norm": 1.160599126459967, "learning_rate": 7.794145767788582e-05, "loss": 1.0666, "step": 175 },
    { "epoch": 0.9650445510623715, "grad_norm": 1.6311368539718085, "learning_rate": 7.789259053711554e-05, "loss": 1.0732, "step": 176 },
    { "epoch": 0.9705277587388622, "grad_norm": 1.3145190591456402, "learning_rate": 7.784316584392379e-05, "loss": 1.0564, "step": 177 },
    { "epoch": 0.976010966415353, "grad_norm": 1.434756052069605, "learning_rate": 7.779318432554663e-05, "loss": 1.0624, "step": 178 },
    { "epoch": 0.9814941740918437, "grad_norm": 1.3078999315572475, "learning_rate": 7.774264671741324e-05, "loss": 1.0486, "step": 179 },
    { "epoch": 0.9869773817683345, "grad_norm": 1.4474824177372518, "learning_rate": 7.769155376313509e-05, "loss": 1.0511, "step": 180 },
    { "epoch": 0.9924605894448252, "grad_norm": 1.4306506181803345, "learning_rate": 7.763990621449507e-05, "loss": 1.0657, "step": 181 },
    { "epoch": 0.997943797121316, "grad_norm": 1.5854524936397603, "learning_rate": 7.758770483143634e-05, "loss": 1.0638, "step": 182 },
    { "epoch": 1.0034270047978067, "grad_norm": 2.818624037527729, "learning_rate": 7.753495038205123e-05, "loss": 1.7465, "step": 183 },
    { "epoch": 1.0089102124742975, "grad_norm": 1.2531970095723952, "learning_rate": 7.748164364256989e-05, "loss": 1.0425, "step": 184 },
    { "epoch": 1.0143934201507883, "grad_norm": 1.8473887590213065, "learning_rate": 7.742778539734884e-05, "loss": 1.0309, "step": 185 },
    { "epoch": 1.019876627827279, "grad_norm": 1.1978766650574866, "learning_rate": 7.737337643885956e-05, "loss": 1.0365, "step": 186 },
    { "epoch": 1.0253598355037696, "grad_norm": 1.6327228122841784, "learning_rate": 7.73184175676766e-05, "loss": 1.0412, "step": 187 },
    { "epoch": 1.0308430431802604, "grad_norm": 1.038869594675331, "learning_rate": 7.726290959246606e-05, "loss": 1.0325, "step": 188 },
    { "epoch": 1.0363262508567512, "grad_norm": 1.8931974706378887, "learning_rate": 7.720685332997354e-05, "loss": 1.0575, "step": 189 },
    { "epoch": 1.041809458533242, "grad_norm": 1.0099409227594682, "learning_rate": 7.715024960501209e-05, "loss": 1.0356, "step": 190 },
    { "epoch": 1.0472926662097326, "grad_norm": 1.7017523773128445, "learning_rate": 7.709309925045023e-05, "loss": 1.048, "step": 191 },
    { "epoch": 1.0527758738862234, "grad_norm": 1.0632647956537205, "learning_rate": 7.70354031071995e-05, "loss": 1.0619, "step": 192 },
    { "epoch": 1.0582590815627142, "grad_norm": 1.6480122219648587, "learning_rate": 7.697716202420227e-05, "loss": 1.0626, "step": 193 },
    { "epoch": 1.063742289239205, "grad_norm": 1.337533514643958, "learning_rate": 7.691837685841913e-05, "loss": 1.0521, "step": 194 },
    { "epoch": 1.0692254969156956, "grad_norm": 1.405204005470527, "learning_rate": 7.685904847481631e-05, "loss": 1.0488, "step": 195 },
    { "epoch": 1.0747087045921864, "grad_norm": 1.0789567262015418, "learning_rate": 7.679917774635298e-05, "loss": 1.0354, "step": 196 },
    { "epoch": 1.0801919122686772, "grad_norm": 1.5059550846436562, "learning_rate": 7.673876555396835e-05, "loss": 1.0488, "step": 197 },
    { "epoch": 1.085675119945168, "grad_norm": 1.285703383014824, "learning_rate": 7.667781278656879e-05, "loss": 1.0326, "step": 198 },
    { "epoch": 1.0911583276216588, "grad_norm": 1.166732355574816, "learning_rate": 7.661632034101466e-05, "loss": 1.0322, "step": 199 },
    { "epoch": 1.0966415352981493, "grad_norm": 1.6867809480621192, "learning_rate": 7.655428912210718e-05, "loss": 1.0364, "step": 200 },
    { "epoch": 1.1021247429746401, "grad_norm": 1.2531266932796477, "learning_rate": 7.64917200425751e-05, "loss": 1.0374, "step": 201 },
    { "epoch": 1.107607950651131, "grad_norm": 1.6183973189746483, "learning_rate": 7.642861402306123e-05, "loss": 1.0302, "step": 202 },
    { "epoch": 1.1130911583276217, "grad_norm": 1.5885719958570816, "learning_rate": 7.636497199210895e-05, "loss": 1.0312, "step": 203 },
    { "epoch": 1.1185743660041123, "grad_norm": 1.271489486550103, "learning_rate": 7.630079488614853e-05, "loss": 1.0439, "step": 204 },
    { "epoch": 1.124057573680603, "grad_norm": 1.6077174464582225, "learning_rate": 7.623608364948334e-05, "loss": 1.0369, "step": 205 },
    { "epoch": 1.129540781357094, "grad_norm": 1.0526829284737067, "learning_rate": 7.617083923427596e-05, "loss": 1.0294, "step": 206 },
    { "epoch": 1.1350239890335847, "grad_norm": 1.8026775192610152, "learning_rate": 7.610506260053415e-05, "loss": 1.0249, "step": 207 },
    { "epoch": 1.1405071967100753, "grad_norm": 1.1092234118793582, "learning_rate": 7.603875471609677e-05, "loss": 1.0479, "step": 208 },
    { "epoch": 1.145990404386566, "grad_norm": 1.252308944775191, "learning_rate": 7.597191655661952e-05, "loss": 1.0377, "step": 209 },
    { "epoch": 1.1514736120630569, "grad_norm": 1.481646296662436, "learning_rate": 7.590454910556058e-05, "loss": 1.0475, "step": 210 },
    { "epoch": 1.1569568197395477, "grad_norm": 1.1007433182898057, "learning_rate": 7.583665335416608e-05, "loss": 1.0105, "step": 211 },
    { "epoch": 1.1624400274160385, "grad_norm": 1.2445540839929121, "learning_rate": 7.576823030145566e-05, "loss": 1.0236, "step": 212 },
    { "epoch": 1.167923235092529, "grad_norm": 1.4019899879808024, "learning_rate": 7.569928095420762e-05, "loss": 1.0437, "step": 213 },
    { "epoch": 1.1734064427690198, "grad_norm": 1.293921037672153, "learning_rate": 7.562980632694421e-05, "loss": 1.0078, "step": 214 },
    { "epoch": 1.1788896504455106, "grad_norm": 1.574451864443491, "learning_rate": 7.555980744191666e-05, "loss": 1.0183, "step": 215 },
    { "epoch": 1.1843728581220014, "grad_norm": 1.0261703383565228, "learning_rate": 7.548928532909006e-05, "loss": 1.0243, "step": 216 },
    { "epoch": 1.1898560657984922, "grad_norm": 1.3888687863743303, "learning_rate": 7.541824102612839e-05, "loss": 1.0241, "step": 217 },
    { "epoch": 1.1953392734749828, "grad_norm": 1.435498169074072, "learning_rate": 7.534667557837912e-05, "loss": 1.0348, "step": 218 },
    { "epoch": 1.2008224811514736, "grad_norm": 1.1812741919729888, "learning_rate": 7.527459003885783e-05, "loss": 1.013, "step": 219 },
    { "epoch": 1.2063056888279644, "grad_norm": 1.209488409322031, "learning_rate": 7.520198546823275e-05, "loss": 1.0279, "step": 220 },
    { "epoch": 1.211788896504455, "grad_norm": 1.5861672644314255, "learning_rate": 7.512886293480914e-05, "loss": 1.0217, "step": 221 },
    { "epoch": 1.2172721041809458, "grad_norm": 1.0784984727297116, "learning_rate": 7.505522351451363e-05, "loss": 1.0308, "step": 222 },
    { "epoch": 1.2227553118574366, "grad_norm": 1.4013682384096804, "learning_rate": 7.498106829087822e-05, "loss": 1.0209, "step": 223 },
    { "epoch": 1.2282385195339274, "grad_norm": 1.1069329108834502, "learning_rate": 7.490639835502458e-05, "loss": 1.0184, "step": 224 },
    { "epoch": 1.2337217272104182, "grad_norm": 1.2292930770165162, "learning_rate": 7.483121480564779e-05, "loss": 1.0192, "step": 225 },
    { "epoch": 1.2392049348869087, "grad_norm": 1.1091280093145537, "learning_rate": 7.475551874900027e-05, "loss": 1.0227, "step": 226 },
    { "epoch": 1.2446881425633995, "grad_norm": 1.3883445894297355, "learning_rate": 7.467931129887548e-05, "loss": 1.0317, "step": 227 },
    { "epoch": 1.2501713502398903, "grad_norm": 1.3258593612132867, "learning_rate": 7.460259357659155e-05, "loss": 1.0276, "step": 228 },
    { "epoch": 1.2556545579163811, "grad_norm": 1.2962983275547162, "learning_rate": 7.452536671097476e-05, "loss": 1.0155, "step": 229 },
    { "epoch": 1.261137765592872, "grad_norm": 1.2345976366071654, "learning_rate": 7.444763183834292e-05, "loss": 0.9999, "step": 230 },
    { "epoch": 1.2666209732693625, "grad_norm": 1.371675205771468, "learning_rate": 7.436939010248868e-05, "loss": 1.0242, "step": 231 },
    { "epoch": 1.2721041809458533, "grad_norm": 1.043563875055389, "learning_rate": 7.429064265466269e-05, "loss": 0.9978, "step": 232 },
    { "epoch": 1.2775873886223441, "grad_norm": 1.4841522976495107, "learning_rate": 7.421139065355663e-05, "loss": 1.0152, "step": 233 },
    { "epoch": 1.2830705962988347, "grad_norm": 0.9605681175633184, "learning_rate": 7.413163526528623e-05, "loss": 1.0162, "step": 234 },
    { "epoch": 1.2885538039753255, "grad_norm": 1.2067100095706547, "learning_rate": 7.405137766337406e-05, "loss": 1.0565, "step": 235 },
    { "epoch": 1.2940370116518163, "grad_norm": 1.5489835530200247, "learning_rate": 7.397061902873223e-05, "loss": 1.0316, "step": 236 },
    { "epoch": 1.299520219328307, "grad_norm": 0.936867837906044, "learning_rate": 7.388936054964512e-05, "loss": 1.0218, "step": 237 },
    { "epoch": 1.3050034270047979, "grad_norm": 1.3113159040022304, "learning_rate": 7.38076034217518e-05, "loss": 1.0305, "step": 238 },
    { "epoch": 1.3104866346812885, "grad_norm": 0.9712447789123708, "learning_rate": 7.372534884802844e-05, "loss": 1.0403, "step": 239 },
    { "epoch": 1.3159698423577793, "grad_norm": 1.2727545703286138, "learning_rate": 7.364259803877072e-05, "loss": 1.0366, "step": 240 },
    { "epoch": 1.32145305003427, "grad_norm": 1.1239260262608586, "learning_rate": 7.355935221157584e-05, "loss": 1.0031, "step": 241 },
    { "epoch": 1.3269362577107608, "grad_norm": 1.1122104721303026, "learning_rate": 7.347561259132479e-05, "loss": 1.0358, "step": 242 },
    { "epoch": 1.3324194653872516, "grad_norm": 1.4676521237806195, "learning_rate": 7.33913804101642e-05, "loss": 1.0362, "step": 243 },
    { "epoch": 1.3379026730637422, "grad_norm": 1.2984708218697247, "learning_rate": 7.330665690748825e-05, "loss": 1.0095, "step": 244 },
    { "epoch": 1.343385880740233, "grad_norm": 1.2692918018410921, "learning_rate": 7.322144332992047e-05, "loss": 1.0126, "step": 245 },
    { "epoch": 1.3488690884167238, "grad_norm": 0.9963650435822852, "learning_rate": 7.313574093129532e-05, "loss": 0.9937, "step": 246 },
    { "epoch": 1.3543522960932146, "grad_norm": 1.45458036698363, "learning_rate": 7.30495509726398e-05, "loss": 1.0389, "step": 247 },
    { "epoch": 1.3598355037697054, "grad_norm": 1.0844293357139385, "learning_rate": 7.29628747221549e-05, "loss": 1.042, "step": 248 },
    { "epoch": 1.365318711446196, "grad_norm": 1.3150162241275731, "learning_rate": 7.287571345519688e-05, "loss": 1.0076, "step": 249 },
    { "epoch": 1.3708019191226868, "grad_norm": 1.170486608498072, "learning_rate": 7.278806845425856e-05, "loss": 1.0411, "step": 250 },
    { "epoch": 1.3762851267991776, "grad_norm": 1.0369874340952356, "learning_rate": 7.269994100895047e-05, "loss": 1.0137, "step": 251 },
    { "epoch": 1.3817683344756682, "grad_norm": 1.3592340170974726, "learning_rate": 7.261133241598177e-05, "loss": 1.0104, "step": 252 },
    { "epoch": 1.387251542152159, "grad_norm": 1.0387909603628305, "learning_rate": 7.25222439791413e-05, "loss": 1.0039, "step": 253 },
    { "epoch": 1.3927347498286498, "grad_norm": 1.2165525508974753, "learning_rate": 7.24326770092783e-05, "loss": 1.0117, "step": 254 },
    { "epoch": 1.3982179575051406, "grad_norm": 1.0298641450176182, "learning_rate": 7.234263282428312e-05, "loss": 1.0087, "step": 255 },
    { "epoch": 1.4037011651816313, "grad_norm": 2.3062469579312768, "learning_rate": 7.225211274906795e-05, "loss": 1.0376, "step": 256 },
    { "epoch": 1.409184372858122, "grad_norm": 1.0448171443413676, "learning_rate": 7.216111811554718e-05, "loss": 1.015, "step": 257 },
    { "epoch": 1.4146675805346127, "grad_norm": 2.282441256400205, "learning_rate": 7.206965026261787e-05, "loss": 1.0506, "step": 258 },
    { "epoch": 1.4201507882111035, "grad_norm": 1.5228239109337287, "learning_rate": 7.197771053614006e-05, "loss": 1.0488, "step": 259 },
    { "epoch": 1.4256339958875943, "grad_norm": 7.1797647736282535, "learning_rate": 7.188530028891691e-05, "loss": 1.0477, "step": 260 },
    { "epoch": 1.4311172035640851, "grad_norm": 19.858381633007593, "learning_rate": 7.179242088067487e-05, "loss": 1.2469, "step": 261 },
    { "epoch": 1.4366004112405757, "grad_norm": 12.439075720330774, "learning_rate": 7.169907367804363e-05, "loss": 1.3376, "step": 262 },
    { "epoch": 1.4420836189170665, "grad_norm": 10.336294598744168, "learning_rate": 7.160526005453599e-05, "loss": 1.2434, "step": 263 },
    { "epoch": 1.4475668265935573, "grad_norm": 3325.4750490493866, "learning_rate": 7.151098139052772e-05, "loss": 4.0042, "step": 264 },
    { "epoch": 1.4530500342700479, "grad_norm": 131.55901739711575, "learning_rate": 7.141623907323717e-05, "loss": 1.8859, "step": 265 },
    { "epoch": 1.4585332419465387, "grad_norm": 11.388495314344222, "learning_rate": 7.13210344967049e-05, "loss": 1.3033, "step": 266 },
    { "epoch": 1.4640164496230295, "grad_norm": 5.045627021720784, "learning_rate": 7.122536906177318e-05, "loss": 1.1984, "step": 267 },
    { "epoch": 1.4694996572995203, "grad_norm": 63.13120968720244, "learning_rate": 7.112924417606536e-05, "loss": 1.2658, "step": 268 },
    { "epoch": 1.474982864976011, "grad_norm": 78.12090408575494, "learning_rate": 7.103266125396512e-05, "loss": 1.6972, "step": 269 },
    { "epoch": 1.4804660726525016, "grad_norm": 40.95778397870689, "learning_rate": 7.093562171659577e-05, "loss": 1.4509, "step": 270 },
    { "epoch": 1.4859492803289924, "grad_norm": 37.962763812559935, "learning_rate": 7.083812699179919e-05, "loss": 1.34, "step": 271 },
    { "epoch": 1.4914324880054832, "grad_norm": 26.82750605185721, "learning_rate": 7.074017851411495e-05, "loss": 1.2961, "step": 272 },
    { "epoch": 1.496915695681974, "grad_norm": 4.438206627978028, "learning_rate": 7.064177772475912e-05, "loss": 1.2112, "step": 273 },
    { "epoch": 1.5023989033584648, "grad_norm": 8.269192562168978, "learning_rate": 7.054292607160313e-05, "loss": 1.1848, "step": 274 },
    { "epoch": 1.5078821110349554, "grad_norm": 3.4078464376801048, "learning_rate": 7.044362500915239e-05, "loss": 1.1018, "step": 275 },
    { "epoch": 1.5133653187114462, "grad_norm": 3.8991572015827836, "learning_rate": 7.034387599852494e-05, "loss": 1.0906, "step": 276 },
    { "epoch": 1.518848526387937, "grad_norm": 2.2792452805163577, "learning_rate": 7.024368050742996e-05, "loss": 1.0535, "step": 277 },
    { "epoch": 1.5243317340644276, "grad_norm": 7.92182168324662, "learning_rate": 7.014304001014614e-05, "loss": 1.09, "step": 278 },
    { "epoch": 1.5298149417409186, "grad_norm": 4.604463247427479, "learning_rate": 7.004195598749997e-05, "loss": 1.0929, "step": 279 },
    { "epoch": 1.5352981494174092, "grad_norm": 1.6269169421948018, "learning_rate": 6.994042992684406e-05, "loss": 1.0721, "step": 280 },
    { "epoch": 1.5407813570939, "grad_norm": 3.1522500098672723, "learning_rate": 6.983846332203508e-05, "loss": 1.052, "step": 281 },
    { "epoch": 1.5462645647703908, "grad_norm": 1.744104230884076, "learning_rate": 6.973605767341194e-05, "loss": 1.0536, "step": 282 },
    { "epoch": 1.5517477724468813, "grad_norm": 1.7435972526503143, "learning_rate": 6.963321448777367e-05, "loss": 1.0562, "step": 283 },
    { "epoch": 1.5572309801233721, "grad_norm": 2.0699287078075783, "learning_rate": 6.952993527835714e-05, "loss": 1.0508, "step": 284 },
    { "epoch": 1.562714187799863, "grad_norm": 2.5216940307065374, "learning_rate": 6.942622156481498e-05, "loss": 1.0696, "step": 285 },
    { "epoch": 1.5681973954763535, "grad_norm": 1.5542171373751579, "learning_rate": 6.932207487319305e-05, "loss": 1.0351, "step": 286 },
    { "epoch": 1.5736806031528445, "grad_norm": 1.1081954487560843, "learning_rate": 6.921749673590813e-05, "loss": 1.0399, "step": 287 },
    { "epoch": 1.579163810829335, "grad_norm": 1.5993253594110808, "learning_rate": 6.911248869172523e-05, "loss": 1.0364, "step": 288 },
    { "epoch": 1.584647018505826, "grad_norm": 1.077571235080621, "learning_rate": 6.900705228573507e-05, "loss": 1.0142, "step": 289 },
    { "epoch": 1.5901302261823167, "grad_norm": 1.2982202905714664, "learning_rate": 6.890118906933126e-05, "loss": 1.0182, "step": 290 },
    { "epoch": 1.5956134338588073, "grad_norm": 1.4040374561281856, "learning_rate": 6.879490060018754e-05, "loss": 1.0215, "step": 291 },
    { "epoch": 1.6010966415352983, "grad_norm": 1.6011611520244904, "learning_rate": 6.86881884422348e-05, "loss": 0.9977, "step": 292 },
    { "epoch": 1.6065798492117889, "grad_norm": 1.7528977017352316, "learning_rate": 6.858105416563812e-05, "loss": 0.9914, "step": 293 },
    { "epoch": 1.6120630568882797, "grad_norm": 1.0189454039968566, "learning_rate": 6.847349934677363e-05, "loss": 1.0233, "step": 294 },
    { "epoch": 1.6175462645647705, "grad_norm": 1.6918231992095927, "learning_rate": 6.836552556820533e-05, "loss": 1.0325, "step": 295 },
    { "epoch": 1.623029472241261, "grad_norm": 0.9564755595265833, "learning_rate": 6.82571344186618e-05, "loss": 1.0198, "step": 296 },
    { "epoch": 1.6285126799177518, "grad_norm": 1.2847377343488389, "learning_rate": 6.814832749301285e-05, "loss": 1.0148, "step": 297 },
    { "epoch": 1.6339958875942426, "grad_norm": 0.9873795646279533, "learning_rate": 6.803910639224598e-05, "loss": 1.0357, "step": 298 },
    { "epoch": 1.6394790952707334, "grad_norm": 1.5820758914116304, "learning_rate": 6.792947272344292e-05, "loss": 1.0376, "step": 299 },
    { "epoch": 1.6449623029472242, "grad_norm": 0.901645544400314, "learning_rate": 6.78194280997559e-05, "loss": 1.0506, "step": 300 },
    { "epoch": 1.6504455106237148, "grad_norm": 1.4932631104259941, "learning_rate": 6.770897414038398e-05, "loss": 1.0178, "step": 301 },
    { "epoch": 1.6559287183002056, "grad_norm": 0.7587099965919871, "learning_rate": 6.759811247054918e-05, "loss": 1.0304, "step": 302 },
    { "epoch": 1.6614119259766964, "grad_norm": 1.6220429069155198, "learning_rate": 6.748684472147255e-05, "loss": 1.0042, "step": 303 },
    { "epoch": 1.666895133653187, "grad_norm": 0.9060198165841729, "learning_rate": 6.737517253035027e-05, "loss": 1.0264, "step": 304 },
    { "epoch": 1.672378341329678, "grad_norm": 0.7810840322821543, "learning_rate": 6.726309754032942e-05, "loss": 1.0194, "step": 305 },
    { "epoch": 1.6778615490061686, "grad_norm": 0.8389807329289671, "learning_rate": 6.715062140048392e-05, "loss": 1.0336, "step": 306 },
    { "epoch": 1.6833447566826594, "grad_norm": 0.9811139261812258, "learning_rate": 6.703774576579018e-05, "loss": 0.9961, "step": 307 },
    { "epoch": 1.6888279643591502, "grad_norm": 1.693203875036964, "learning_rate": 6.69244722971028e-05, "loss": 1.0054, "step": 308 },
    { "epoch": 1.6943111720356407, "grad_norm": 0.6313152362555375, "learning_rate": 6.681080266113017e-05, "loss": 1.0138, "step": 309 },
    { "epoch": 1.6997943797121315, "grad_norm": 1.2050041258375503, "learning_rate": 6.669673853040979e-05, "loss": 1.0181, "step": 310 },
    { "epoch": 1.7052775873886223, "grad_norm": 1.0692574241468813, "learning_rate": 6.658228158328384e-05, "loss": 1.0196, "step": 311 },
    { "epoch": 1.7107607950651131, "grad_norm": 1.3523681454597505, "learning_rate": 6.646743350387438e-05, "loss": 1.0405, "step": 312 },
    { "epoch": 1.716244002741604, "grad_norm": 0.9778959208393927, "learning_rate": 6.635219598205863e-05, "loss": 1.0308, "step": 313 },
    { "epoch": 1.7217272104180945, "grad_norm": 0.9710639823435132, "learning_rate": 6.623657071344407e-05, "loss": 0.9845, "step": 314 },
    { "epoch": 1.7272104180945853, "grad_norm": 1.1795695782951767, "learning_rate": 6.61205593993434e-05, "loss": 1.0354, "step": 315 },
    { "epoch": 1.732693625771076, "grad_norm": 1.256640921349798, "learning_rate": 6.600416374674978e-05, "loss": 1.0074, "step": 316 },
    { "epoch": 1.7381768334475667, "grad_norm": 0.989814843263136, "learning_rate": 6.588738546831136e-05, "loss": 1.025, "step": 317 },
    { "epoch": 1.7436600411240577, "grad_norm": 0.9259711398370861, "learning_rate": 6.577022628230638e-05, "loss": 1.0132, "step": 318 },
    { "epoch": 1.7491432488005483, "grad_norm": 0.7510586657590674, "learning_rate": 6.565268791261769e-05, "loss": 1.0092, "step": 319 },
    { "epoch": 1.754626456477039, "grad_norm": 0.6249763060539171, "learning_rate": 6.553477208870748e-05, "loss": 1.0198, "step": 320 },
    { "epoch": 1.7601096641535299, "grad_norm": 0.6961806925064483, "learning_rate": 6.541648054559182e-05, "loss": 1.0265, "step": 321 },
    { "epoch": 1.7655928718300204, "grad_norm": 0.8447275384338139, "learning_rate": 6.529781502381509e-05, "loss": 1.014, "step": 322 },
    { "epoch": 1.7710760795065115, "grad_norm": 1.052987904165586, "learning_rate": 6.517877726942445e-05, "loss": 0.9936, "step": 323 },
    { "epoch": 1.776559287183002, "grad_norm": 1.2555546445227173, "learning_rate": 6.505936903394406e-05, "loss": 0.9993, "step": 324 },
    { "epoch": 1.7820424948594928, "grad_norm": 0.7871410118515071, "learning_rate": 6.493959207434934e-05, "loss": 1.0246, "step": 325 },
    { "epoch": 1.7875257025359836, "grad_norm": 0.8254368841706734, "learning_rate": 6.481944815304117e-05, "loss": 0.9871, "step": 326 },
    { "epoch": 1.7930089102124742, "grad_norm": 2.389893141482841, "learning_rate": 6.469893903781987e-05, "loss": 1.0288, "step": 327 },
    { "epoch": 1.798492117888965, "grad_norm": 0.8529982927275416, "learning_rate": 6.457806650185925e-05, "loss": 1.0209, "step": 328 },
    { "epoch": 1.8039753255654558, "grad_norm": 1.9419731761625634, "learning_rate": 6.44568323236805e-05, "loss": 0.9977, "step": 329 },
    { "epoch": 1.8094585332419464, "grad_norm": 0.8112015130720743, "learning_rate": 6.433523828712599e-05, "loss": 1.0182, "step": 330 },
    { "epoch": 1.8149417409184374, "grad_norm": 1.8647552723000032, "learning_rate": 6.421328618133312e-05, "loss": 1.0059, "step": 331 },
    { "epoch": 1.820424948594928, "grad_norm": 1.1202805462423913, "learning_rate": 6.409097780070789e-05, "loss": 1.0315, "step": 332 },
    { "epoch": 1.8259081562714188, "grad_norm": 1.4717552883952516, "learning_rate": 6.396831494489852e-05, "loss": 1.0178, "step": 333 },
    { "epoch": 1.8313913639479096, "grad_norm": 1.247198837427445, "learning_rate": 6.384529941876902e-05, "loss": 1.0244, "step": 334 },
    { "epoch": 1.8368745716244002, "grad_norm": 1.165713928714055, "learning_rate": 6.372193303237258e-05, "loss": 1.0234, "step": 335 },
    { "epoch": 1.8423577793008912, "grad_norm": 0.819719891951629, "learning_rate": 6.359821760092493e-05, "loss": 1.0252, "step": 336 },
    { "epoch": 1.8478409869773817, "grad_norm": 0.9694295545160206, "learning_rate": 6.347415494477771e-05, "loss": 1.0005, "step": 337 },
    { "epoch": 1.8533241946538725, "grad_norm": 0.833369832433467, "learning_rate": 6.334974688939161e-05, "loss": 1.0005, "step": 338 },
    { "epoch": 1.8588074023303633, "grad_norm": 1.051580411769736, "learning_rate": 6.322499526530951e-05, "loss": 0.994, "step": 339 },
    { "epoch": 1.864290610006854, "grad_norm": 1.3409908447267063, "learning_rate": 6.30999019081296e-05, "loss": 1.0051, "step": 340 },
    { "epoch": 1.8697738176833447, "grad_norm": 0.7200296682178966, "learning_rate": 6.297446865847833e-05, "loss": 0.9916, "step": 341 },
    { "epoch": 1.8752570253598355, "grad_norm": 0.5703748532613969, "learning_rate": 6.284869736198332e-05, "loss": 1.0117, "step": 342 },
    { "epoch": 1.880740233036326, "grad_norm": 0.656233302341983, "learning_rate": 6.272258986924624e-05, "loss": 0.9995, "step": 343 },
    { "epoch": 1.8862234407128171, "grad_norm": 0.9959957194372924, "learning_rate": 6.259614803581553e-05, "loss": 0.9863, "step": 344 },
    { "epoch": 1.8917066483893077, "grad_norm": 1.2637430599411104, "learning_rate": 6.246937372215916e-05, "loss": 1.0044, "step": 345 },
    { "epoch": 1.8971898560657985, "grad_norm": 0.6655587539599807, "learning_rate": 6.23422687936372e-05, "loss": 1.004, "step": 346 },
    { "epoch": 1.9026730637422893, "grad_norm": 0.6292486765776564, "learning_rate": 6.22148351204744e-05, "loss": 1.0071, "step": 347 },
    { "epoch": 1.9081562714187799, "grad_norm": 0.7336634351921288, "learning_rate": 6.208707457773267e-05, "loss": 0.9866, "step": 348 },
    { "epoch": 1.9136394790952709, "grad_norm": 0.6842175616230739, "learning_rate": 6.195898904528346e-05, "loss": 0.9911, "step": 349 },
    { "epoch": 1.9191226867717615, "grad_norm": 0.6937670660831978, "learning_rate": 6.183058040778018e-05, "loss": 0.9927, "step": 350 },
{ |
|
"epoch": 1.9246058944482523, |
|
"grad_norm": 0.6943683013295557, |
|
"learning_rate": 6.170185055463039e-05, |
|
"loss": 0.9883, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.930089102124743, |
|
"grad_norm": 0.6759661412127859, |
|
"learning_rate": 6.157280137996797e-05, |
|
"loss": 0.9792, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.9355723098012336, |
|
"grad_norm": 0.7487530675075756, |
|
"learning_rate": 6.14434347826254e-05, |
|
"loss": 0.9917, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.9410555174777244, |
|
"grad_norm": 0.8480627285748816, |
|
"learning_rate": 6.131375266610564e-05, |
|
"loss": 1.0116, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.9465387251542152, |
|
"grad_norm": 1.026881712032747, |
|
"learning_rate": 6.118375693855426e-05, |
|
"loss": 0.997, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.9520219328307058, |
|
"grad_norm": 1.1819824734836144, |
|
"learning_rate": 6.10534495127313e-05, |
|
"loss": 0.9866, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.9575051405071968, |
|
"grad_norm": 0.7488114847350166, |
|
"learning_rate": 6.092283230598311e-05, |
|
"loss": 0.9992, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.9629883481836874, |
|
"grad_norm": 0.5942125372742829, |
|
"learning_rate": 6.079190724021418e-05, |
|
"loss": 1.0019, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.9684715558601782, |
|
"grad_norm": 0.6416782063906716, |
|
"learning_rate": 6.066067624185886e-05, |
|
"loss": 0.9851, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.973954763536669, |
|
"grad_norm": 0.6541261024351206, |
|
"learning_rate": 6.0529141241852974e-05, |
|
"loss": 0.9967, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.9794379712131596, |
|
"grad_norm": 0.7497124499642812, |
|
"learning_rate": 6.0397304175605444e-05, |
|
"loss": 0.9809, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.9849211788896506, |
|
"grad_norm": 0.868567720778104, |
|
"learning_rate": 6.026516698296979e-05, |
|
"loss": 1.0105, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.9904043865661412, |
|
"grad_norm": 0.9302778924884637, |
|
"learning_rate": 6.0132731608215626e-05, |
|
"loss": 0.9922, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.995887594242632, |
|
"grad_norm": 0.9976983084275789, |
|
"learning_rate": 6.000000000000001e-05, |
|
"loss": 1.0146, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 2.0013708019191228, |
|
"grad_norm": 1.7195064119299488, |
|
"learning_rate": 5.9866974111338764e-05, |
|
"loss": 1.5667, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.0068540095956133, |
|
"grad_norm": 0.8460708264395937, |
|
"learning_rate": 5.973365589957777e-05, |
|
"loss": 0.9585, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 2.0123372172721044, |
|
"grad_norm": 0.6860754084451314, |
|
"learning_rate": 5.96000473263642e-05, |
|
"loss": 0.9479, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 2.017820424948595, |
|
"grad_norm": 0.569858067536389, |
|
"learning_rate": 5.946615035761756e-05, |
|
"loss": 0.9485, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 2.0233036326250855, |
|
"grad_norm": 0.4334842237308715, |
|
"learning_rate": 5.9331966963500825e-05, |
|
"loss": 0.9368, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 2.0287868403015765, |
|
"grad_norm": 0.5181677147256706, |
|
"learning_rate": 5.919749911839146e-05, |
|
"loss": 0.9804, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.034270047978067, |
|
"grad_norm": 0.6881500556087109, |
|
"learning_rate": 5.9062748800852315e-05, |
|
"loss": 0.9499, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.039753255654558, |
|
"grad_norm": 0.7541551892158753, |
|
"learning_rate": 5.892771799360258e-05, |
|
"loss": 0.9832, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.0452364633310487, |
|
"grad_norm": 0.7791303072747863, |
|
"learning_rate": 5.879240868348857e-05, |
|
"loss": 0.9449, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.0507196710075393, |
|
"grad_norm": 0.7144846911633607, |
|
"learning_rate": 5.865682286145446e-05, |
|
"loss": 0.9534, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.0562028786840303, |
|
"grad_norm": 0.677470076470832, |
|
"learning_rate": 5.852096252251308e-05, |
|
"loss": 0.9579, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.061686086360521, |
|
"grad_norm": 0.7427233443080802, |
|
"learning_rate": 5.8384829665716475e-05, |
|
"loss": 0.9233, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.0671692940370114, |
|
"grad_norm": 0.7777873567557757, |
|
"learning_rate": 5.824842629412653e-05, |
|
"loss": 0.9706, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.0726525017135025, |
|
"grad_norm": 0.7135923513575179, |
|
"learning_rate": 5.8111754414785504e-05, |
|
"loss": 0.9232, |
|
"step": 378 |
|
}, |
|
{
"epoch": 2.078135709389993,
"grad_norm": 0.7125391347465736,
"learning_rate": 5.797481603868646e-05,
"loss": 0.9525,
"step": 379
},
{
"epoch": 2.083618917066484,
"grad_norm": 0.7238619031777882,
"learning_rate": 5.783761318074373e-05,
"loss": 0.9525,
"step": 380
},
{
"epoch": 2.0891021247429746,
"grad_norm": 0.7128949358918412,
"learning_rate": 5.770014785976322e-05,
"loss": 0.9511,
"step": 381
},
{
"epoch": 2.094585332419465,
"grad_norm": 0.7209215696044494,
"learning_rate": 5.756242209841272e-05,
"loss": 0.9636,
"step": 382
},
{
"epoch": 2.1000685400959562,
"grad_norm": 0.6327662060979778,
"learning_rate": 5.742443792319216e-05,
"loss": 0.968,
"step": 383
},
{
"epoch": 2.105551747772447,
"grad_norm": 0.4985051187489137,
"learning_rate": 5.728619736440375e-05,
"loss": 0.953,
"step": 384
},
{
"epoch": 2.111034955448938,
"grad_norm": 0.3861018767101939,
"learning_rate": 5.714770245612217e-05,
"loss": 0.967,
"step": 385
},
{
"epoch": 2.1165181631254284,
"grad_norm": 0.32959266195178644,
"learning_rate": 5.700895523616459e-05,
"loss": 0.9281,
"step": 386
},
{
"epoch": 2.122001370801919,
"grad_norm": 0.34591568155580654,
"learning_rate": 5.6869957746060675e-05,
"loss": 0.9521,
"step": 387
},
{
"epoch": 2.12748457847841,
"grad_norm": 0.4625069922403467,
"learning_rate": 5.673071203102261e-05,
"loss": 0.9628,
"step": 388
},
{
"epoch": 2.1329677861549006,
"grad_norm": 0.4988336639530923,
"learning_rate": 5.6591220139914945e-05,
"loss": 0.9568,
"step": 389
},
{
"epoch": 2.138450993831391,
"grad_norm": 0.49194383493690574,
"learning_rate": 5.645148412522447e-05,
"loss": 0.9524,
"step": 390
},
{
"epoch": 2.143934201507882,
"grad_norm": 0.43352665389513567,
"learning_rate": 5.6311506043030006e-05,
"loss": 0.9469,
"step": 391
},
{
"epoch": 2.1494174091843727,
"grad_norm": 0.3306646386411896,
"learning_rate": 5.61712879529722e-05,
"loss": 0.9417,
"step": 392
},
{
"epoch": 2.1549006168608638,
"grad_norm": 0.29441855117758814,
"learning_rate": 5.6030831918223136e-05,
"loss": 0.9579,
"step": 393
},
{
"epoch": 2.1603838245373543,
"grad_norm": 0.3597929142259359,
"learning_rate": 5.5890140005456056e-05,
"loss": 0.9299,
"step": 394
},
{
"epoch": 2.165867032213845,
"grad_norm": 0.4844488175844035,
"learning_rate": 5.574921428481487e-05,
"loss": 0.9401,
"step": 395
},
{
"epoch": 2.171350239890336,
"grad_norm": 0.6802142906239217,
"learning_rate": 5.5608056829883796e-05,
"loss": 0.9357,
"step": 396
},
{
"epoch": 2.1768334475668265,
"grad_norm": 1.2106002196981427,
"learning_rate": 5.546666971765675e-05,
"loss": 0.9651,
"step": 397
},
{
"epoch": 2.1823166552433175,
"grad_norm": 0.6157717006727198,
"learning_rate": 5.532505502850688e-05,
"loss": 0.9528,
"step": 398
},
{
"epoch": 2.187799862919808,
"grad_norm": 0.641804078912436,
"learning_rate": 5.5183214846155864e-05,
"loss": 0.9718,
"step": 399
},
{
"epoch": 2.1932830705962987,
"grad_norm": 0.6070401599275227,
"learning_rate": 5.504115125764329e-05,
"loss": 0.9572,
"step": 400
},
{
"epoch": 2.1987662782727897,
"grad_norm": 0.7100305243791368,
"learning_rate": 5.489886635329598e-05,
"loss": 0.9417,
"step": 401
},
{
"epoch": 2.2042494859492803,
"grad_norm": 0.9164307400605365,
"learning_rate": 5.4756362226697193e-05,
"loss": 0.9449,
"step": 402
},
{
"epoch": 2.209732693625771,
"grad_norm": 1.3336341868468473,
"learning_rate": 5.461364097465581e-05,
"loss": 0.9758,
"step": 403
},
{
"epoch": 2.215215901302262,
"grad_norm": 0.5707410680573715,
"learning_rate": 5.447070469717552e-05,
"loss": 0.9509,
"step": 404
},
{
"epoch": 2.2206991089787524,
"grad_norm": 0.5349427819988657,
"learning_rate": 5.4327555497423874e-05,
"loss": 0.977,
"step": 405
},
{
"epoch": 2.2261823166552435,
"grad_norm": 0.6665137379275449,
"learning_rate": 5.4184195481701425e-05,
"loss": 0.9718,
"step": 406
},
{
"epoch": 2.231665524331734,
"grad_norm": 0.6788016902611529,
"learning_rate": 5.4040626759410625e-05,
"loss": 0.9559,
"step": 407
},
{
"epoch": 2.2371487320082246,
"grad_norm": 0.7062646461019303,
"learning_rate": 5.3896851443024837e-05,
"loss": 0.9663,
"step": 408
},
{
"epoch": 2.2426319396847156,
"grad_norm": 0.6871839246478715,
"learning_rate": 5.375287164805727e-05,
"loss": 0.9814,
"step": 409
},
{
"epoch": 2.248115147361206,
"grad_norm": 0.5968021041080723,
"learning_rate": 5.360868949302986e-05,
"loss": 0.937,
"step": 410
},
{
"epoch": 2.2535983550376972,
"grad_norm": 0.5805892891972629,
"learning_rate": 5.3464307099442035e-05,
"loss": 0.9479,
"step": 411
},
{
"epoch": 2.259081562714188,
"grad_norm": 0.4860199686870337,
"learning_rate": 5.3319726591739536e-05,
"loss": 0.9541,
"step": 412
},
{
"epoch": 2.2645647703906784,
"grad_norm": 0.5697676557756951,
"learning_rate": 5.317495009728319e-05,
"loss": 0.9471,
"step": 413
},
{
"epoch": 2.2700479780671694,
"grad_norm": 0.3474403252554382,
"learning_rate": 5.302997974631757e-05,
"loss": 0.9535,
"step": 414
},
{
"epoch": 2.27553118574366,
"grad_norm": 0.3892967631879122,
"learning_rate": 5.288481767193963e-05,
"loss": 0.9573,
"step": 415
},
{
"epoch": 2.2810143934201506,
"grad_norm": 0.43748961608691317,
"learning_rate": 5.2739466010067385e-05,
"loss": 0.9377,
"step": 416
},
{
"epoch": 2.2864976010966416,
"grad_norm": 0.410672984619657,
"learning_rate": 5.259392689940841e-05,
"loss": 0.9313,
"step": 417
},
{
"epoch": 2.291980808773132,
"grad_norm": 0.44449306463157623,
"learning_rate": 5.244820248142844e-05,
"loss": 0.9373,
"step": 418
},
{
"epoch": 2.297464016449623,
"grad_norm": 0.457420584226533,
"learning_rate": 5.2302294900319796e-05,
"loss": 0.9627,
"step": 419
},
{
"epoch": 2.3029472241261137,
"grad_norm": 0.49606181089497686,
"learning_rate": 5.215620630296988e-05,
"loss": 0.9491,
"step": 420
},
{
"epoch": 2.3084304318026048,
"grad_norm": 0.5557714109621562,
"learning_rate": 5.200993883892956e-05,
"loss": 0.9538,
"step": 421
},
{
"epoch": 2.3139136394790953,
"grad_norm": 0.5848899773483123,
"learning_rate": 5.1863494660381586e-05,
"loss": 0.9589,
"step": 422
},
{
"epoch": 2.319396847155586,
"grad_norm": 0.5756814498770187,
"learning_rate": 5.1716875922108836e-05,
"loss": 0.9437,
"step": 423
},
{
"epoch": 2.324880054832077,
"grad_norm": 0.5975395685498862,
"learning_rate": 5.1570084781462716e-05,
"loss": 0.9752,
"step": 424
},
{
"epoch": 2.3303632625085675,
"grad_norm": 0.6741079069902028,
"learning_rate": 5.142312339833131e-05,
"loss": 0.9695,
"step": 425
},
{
"epoch": 2.335846470185058,
"grad_norm": 0.47322117724121776,
"learning_rate": 5.1275993935107714e-05,
"loss": 0.9376,
"step": 426
},
{
"epoch": 2.341329677861549,
"grad_norm": 0.3713624782507764,
"learning_rate": 5.112869855665811e-05,
"loss": 0.9587,
"step": 427
},
{
"epoch": 2.3468128855380397,
"grad_norm": 0.3297040646564409,
"learning_rate": 5.098123943028999e-05,
"loss": 0.95,
"step": 428
},
{
"epoch": 2.3522960932145303,
"grad_norm": 0.3283051265027244,
"learning_rate": 5.0833618725720214e-05,
"loss": 0.9585,
"step": 429
},
{
"epoch": 2.3577793008910213,
"grad_norm": 0.33355324970835326,
"learning_rate": 5.0685838615043124e-05,
"loss": 0.9373,
"step": 430
},
{
"epoch": 2.363262508567512,
"grad_norm": 0.32077316347401524,
"learning_rate": 5.053790127269855e-05,
"loss": 0.9281,
"step": 431
},
{
"epoch": 2.368745716244003,
"grad_norm": 0.29194427533135364,
"learning_rate": 5.038980887543987e-05,
"loss": 0.9565,
"step": 432
},
{
"epoch": 2.3742289239204935,
"grad_norm": 0.2613277032435807,
"learning_rate": 5.024156360230189e-05,
"loss": 0.9654,
"step": 433
},
{
"epoch": 2.3797121315969845,
"grad_norm": 0.30838651942670436,
"learning_rate": 5.0093167634568874e-05,
"loss": 0.9555,
"step": 434
},
{
"epoch": 2.385195339273475,
"grad_norm": 0.3084305013600498,
"learning_rate": 4.9944623155742395e-05,
"loss": 0.9301,
"step": 435
},
{
"epoch": 2.3906785469499656,
"grad_norm": 0.3387529105331699,
"learning_rate": 4.979593235150924e-05,
"loss": 0.9428,
"step": 436
},
{
"epoch": 2.3961617546264566,
"grad_norm": 0.3995434200366217,
"learning_rate": 4.9647097409709186e-05,
"loss": 0.9405,
"step": 437
},
{
"epoch": 2.401644962302947,
"grad_norm": 0.41290791299928126,
"learning_rate": 4.94981205203029e-05,
"loss": 0.9483,
"step": 438
},
{
"epoch": 2.407128169979438,
"grad_norm": 0.4272879341834311,
"learning_rate": 4.934900387533965e-05,
"loss": 0.9434,
"step": 439
},
{
"epoch": 2.412611377655929,
"grad_norm": 0.46378214773916265,
"learning_rate": 4.9199749668925076e-05,
"loss": 0.9551,
"step": 440
},
{
"epoch": 2.4180945853324194,
"grad_norm": 0.46144056548958406,
"learning_rate": 4.9050360097188904e-05,
"loss": 0.952,
"step": 441
},
{
"epoch": 2.42357779300891,
"grad_norm": 0.4496399315688108,
"learning_rate": 4.890083735825258e-05,
"loss": 0.9414,
"step": 442
},
{
"epoch": 2.429061000685401,
"grad_norm": 0.5404123880896423,
"learning_rate": 4.875118365219706e-05,
"loss": 0.976,
"step": 443
},
{
"epoch": 2.4345442083618916,
"grad_norm": 0.6694234081039024,
"learning_rate": 4.86014011810303e-05,
"loss": 0.938,
"step": 444
},
{
"epoch": 2.4400274160383826,
"grad_norm": 0.7976721444374203,
"learning_rate": 4.845149214865491e-05,
"loss": 0.9428,
"step": 445
},
{
"epoch": 2.445510623714873,
"grad_norm": 0.8514351115579054,
"learning_rate": 4.830145876083575e-05,
"loss": 0.9716,
"step": 446
},
{
"epoch": 2.450993831391364,
"grad_norm": 0.8419233920727014,
"learning_rate": 4.81513032251674e-05,
"loss": 0.9375,
"step": 447
},
{
"epoch": 2.4564770390678548,
"grad_norm": 0.7939704221285054,
"learning_rate": 4.8001027751041784e-05,
"loss": 0.9539,
"step": 448
},
{
"epoch": 2.4619602467443453,
"grad_norm": 0.6506959170383838,
"learning_rate": 4.785063454961557e-05,
"loss": 0.974,
"step": 449
},
{
"epoch": 2.4674434544208363,
"grad_norm": 0.4039955860724156,
"learning_rate": 4.7700125833777664e-05,
"loss": 0.9409,
"step": 450
},
{
"epoch": 2.472926662097327,
"grad_norm": 0.33076049398826807,
"learning_rate": 4.754950381811667e-05,
"loss": 0.9561,
"step": 451
},
{
"epoch": 2.4784098697738175,
"grad_norm": 0.4841447525816379,
"learning_rate": 4.7398770718888296e-05,
"loss": 0.9557,
"step": 452
},
{
"epoch": 2.4838930774503085,
"grad_norm": 0.6029861941124044,
"learning_rate": 4.724792875398271e-05,
"loss": 0.9254,
"step": 453
},
{
"epoch": 2.489376285126799,
"grad_norm": 0.6018267599600162,
"learning_rate": 4.7096980142891936e-05,
"loss": 0.9336,
"step": 454
},
{
"epoch": 2.49485949280329,
"grad_norm": 0.4547177146031252,
"learning_rate": 4.694592710667723e-05,
"loss": 0.941,
"step": 455
},
{
"epoch": 2.5003427004797807,
"grad_norm": 0.2560704544963682,
"learning_rate": 4.6794771867936286e-05,
"loss": 0.9413,
"step": 456
},
{
"epoch": 2.5058259081562713,
"grad_norm": 0.30599482861554783,
"learning_rate": 4.66435166507707e-05,
"loss": 0.9605,
"step": 457
},
{
"epoch": 2.5113091158327623,
"grad_norm": 0.42429824249004056,
"learning_rate": 4.6492163680753096e-05,
"loss": 0.9403,
"step": 458
},
{
"epoch": 2.516792323509253,
"grad_norm": 0.476594932388952,
"learning_rate": 4.634071518489443e-05,
"loss": 0.9354,
"step": 459
},
{
"epoch": 2.522275531185744,
"grad_norm": 0.46789821860796127,
"learning_rate": 4.618917339161125e-05,
"loss": 0.9495,
"step": 460
},
{
"epoch": 2.5277587388622345,
"grad_norm": 0.3784567986912495,
"learning_rate": 4.6037540530692905e-05,
"loss": 0.958,
"step": 461
},
{
"epoch": 2.533241946538725,
"grad_norm": 0.3589933646618808,
"learning_rate": 4.588581883326865e-05,
"loss": 0.9449,
"step": 462
},
{
"epoch": 2.538725154215216,
"grad_norm": 0.4378215691524641,
"learning_rate": 4.573401053177494e-05,
"loss": 0.949,
"step": 463
},
{
"epoch": 2.5442083618917066,
"grad_norm": 0.4673331119883799,
"learning_rate": 4.558211785992251e-05,
"loss": 0.9555,
"step": 464
},
{
"epoch": 2.549691569568197,
"grad_norm": 0.4792027471063911,
"learning_rate": 4.543014305266352e-05,
"loss": 0.981,
"step": 465
},
{
"epoch": 2.5551747772446882,
"grad_norm": 0.4774897666673642,
"learning_rate": 4.5278088346158665e-05,
"loss": 0.9456,
"step": 466
},
{
"epoch": 2.560657984921179,
"grad_norm": 0.4425042364504943,
"learning_rate": 4.512595597774427e-05,
"loss": 0.9429,
"step": 467
},
{
"epoch": 2.5661411925976694,
"grad_norm": 0.39911527326216606,
"learning_rate": 4.4973748185899416e-05,
"loss": 0.9413,
"step": 468
},
{
"epoch": 2.5716244002741604,
"grad_norm": 0.29210171265761314,
"learning_rate": 4.4821467210212924e-05,
"loss": 0.9297,
"step": 469
},
{
"epoch": 2.577107607950651,
"grad_norm": 0.25565384930514384,
"learning_rate": 4.4669115291350484e-05,
"loss": 0.9454,
"step": 470
},
{
"epoch": 2.582590815627142,
"grad_norm": 0.2581461861152204,
"learning_rate": 4.451669467102162e-05,
"loss": 0.948,
"step": 471
},
{
"epoch": 2.5880740233036326,
"grad_norm": 0.29491880993583836,
"learning_rate": 4.436420759194671e-05,
"loss": 0.9381,
"step": 472
},
{
"epoch": 2.5935572309801236,
"grad_norm": 0.31658803555973,
"learning_rate": 4.4211656297824064e-05,
"loss": 0.9618,
"step": 473
},
{
"epoch": 2.599040438656614,
"grad_norm": 0.3245542687115596,
"learning_rate": 4.4059043033296815e-05,
"loss": 0.9488,
"step": 474
},
{
"epoch": 2.6045236463331047,
"grad_norm": 0.2905774382505926,
"learning_rate": 4.390637004391993e-05,
"loss": 0.9323,
"step": 475
},
{
"epoch": 2.6100068540095958,
"grad_norm": 0.2428082128722324,
"learning_rate": 4.375363957612717e-05,
"loss": 0.9549,
"step": 476
},
{
"epoch": 2.6154900616860863,
"grad_norm": 0.24550826505798423,
"learning_rate": 4.360085387719806e-05,
"loss": 0.9493,
"step": 477
},
{
"epoch": 2.620973269362577,
"grad_norm": 0.30222793473570203,
"learning_rate": 4.344801519522478e-05,
"loss": 0.9317,
"step": 478
},
{
"epoch": 2.626456477039068,
"grad_norm": 0.3497976704406349,
"learning_rate": 4.32951257790791e-05,
"loss": 0.9377,
"step": 479
},
{
"epoch": 2.6319396847155585,
"grad_norm": 1.2304346438090321,
"learning_rate": 4.314218787837925e-05,
"loss": 0.9442,
"step": 480
},
{
"epoch": 2.637422892392049,
"grad_norm": 0.2466343919140661,
"learning_rate": 4.298920374345698e-05,
"loss": 0.944,
"step": 481
},
{
"epoch": 2.64290610006854,
"grad_norm": 1.8185754976050628,
"learning_rate": 4.283617562532421e-05,
"loss": 0.9825,
"step": 482
},
{
"epoch": 2.648389307745031,
"grad_norm": 0.5797415336422694,
"learning_rate": 4.2683105775640096e-05,
"loss": 0.9565,
"step": 483
},
{
"epoch": 2.6538725154215217,
"grad_norm": 0.6585844779419097,
"learning_rate": 4.2529996446677814e-05,
"loss": 0.9349,
"step": 484
},
{
"epoch": 2.6593557230980123,
"grad_norm": 0.6364339320143122,
"learning_rate": 4.237684989129146e-05,
"loss": 0.9407,
"step": 485
},
{
"epoch": 2.6648389307745033,
"grad_norm": 0.7003736753475072,
"learning_rate": 4.2223668362882846e-05,
"loss": 0.952,
"step": 486
},
{
"epoch": 2.670322138450994,
"grad_norm": 0.6868570924103546,
"learning_rate": 4.2070454115368385e-05,
"loss": 0.9465,
"step": 487
},
{
"epoch": 2.6758053461274844,
"grad_norm": 0.8377123241663084,
"learning_rate": 4.191720940314593e-05,
"loss": 0.9365,
"step": 488
},
{
"epoch": 2.6812885538039755,
"grad_norm": 1.6613962498279427,
"learning_rate": 4.176393648106161e-05,
"loss": 0.9592,
"step": 489
},
{
"epoch": 2.686771761480466,
"grad_norm": 0.9696773125080363,
"learning_rate": 4.1610637604376614e-05,
"loss": 0.9526,
"step": 490
},
{
"epoch": 2.6922549691569566,
"grad_norm": 1.0222168588069447,
"learning_rate": 4.1457315028734015e-05,
"loss": 0.9736,
"step": 491
},
{
"epoch": 2.6977381768334476,
"grad_norm": 0.9553354559606164,
"learning_rate": 4.13039710101256e-05,
"loss": 0.9668,
"step": 492
},
{
"epoch": 2.703221384509938,
"grad_norm": 0.8745999789478393,
"learning_rate": 4.11506078048587e-05,
"loss": 0.9716,
"step": 493
},
{
"epoch": 2.7087045921864292,
"grad_norm": 0.7411939891535938,
"learning_rate": 4.0997227669522924e-05,
"loss": 0.9602,
"step": 494
},
{
"epoch": 2.71418779986292,
"grad_norm": 0.6053293148082218,
"learning_rate": 4.0843832860956994e-05,
"loss": 0.9321,
"step": 495
},
{
"epoch": 2.719671007539411,
"grad_norm": 0.6015174283144377,
"learning_rate": 4.069042563621555e-05,
"loss": 0.9556,
"step": 496
},
{
"epoch": 2.7251542152159014,
"grad_norm": 0.5341988781830062,
"learning_rate": 4.0537008252535904e-05,
"loss": 0.9502,
"step": 497
},
{
"epoch": 2.730637422892392,
"grad_norm": 0.3986261784888644,
"learning_rate": 4.0383582967304865e-05,
"loss": 0.9498,
"step": 498
},
{
"epoch": 2.736120630568883,
"grad_norm": 0.4491497759193806,
"learning_rate": 4.023015203802551e-05,
"loss": 0.9578,
"step": 499
},
{
"epoch": 2.7416038382453736,
"grad_norm": 0.42597854684305547,
"learning_rate": 4.0076717722283936e-05,
"loss": 0.9258,
"step": 500
},
{
"epoch": 2.747087045921864,
"grad_norm": 0.3674427892141133,
"learning_rate": 3.992328227771608e-05,
"loss": 0.956,
"step": 501
},
{
"epoch": 2.752570253598355,
"grad_norm": 0.3441073888754271,
"learning_rate": 3.976984796197451e-05,
"loss": 0.9347,
"step": 502
},
{
"epoch": 2.7580534612748457,
"grad_norm": 0.5779083792402964,
"learning_rate": 3.961641703269514e-05,
"loss": 0.9591,
"step": 503
},
{
"epoch": 2.7635366689513363,
"grad_norm": 0.45842637741331516,
"learning_rate": 3.946299174746411e-05,
"loss": 0.9497,
"step": 504
},
{
"epoch": 2.7690198766278273,
"grad_norm": 0.42003328606156265,
"learning_rate": 3.9309574363784465e-05,
"loss": 0.9365,
"step": 505
},
{
"epoch": 2.774503084304318,
"grad_norm": 0.3971675276536041,
"learning_rate": 3.915616713904302e-05,
"loss": 0.9532,
"step": 506
},
{
"epoch": 2.779986291980809,
"grad_norm": 0.3738039726781753,
"learning_rate": 3.9002772330477096e-05,
"loss": 0.9505,
"step": 507
},
{
"epoch": 2.7854694996572995,
"grad_norm": 0.3490205849659376,
"learning_rate": 3.884939219514132e-05,
"loss": 0.9383,
"step": 508
},
{
"epoch": 2.7909527073337905,
"grad_norm": 0.6333313596692628,
"learning_rate": 3.869602898987441e-05,
"loss": 0.9528,
"step": 509
},
{
"epoch": 2.796435915010281,
"grad_norm": 0.3242106302112962,
"learning_rate": 3.854268497126601e-05,
"loss": 0.9554,
"step": 510
},
{
"epoch": 2.8019191226867717,
"grad_norm": 0.2849035104268222,
"learning_rate": 3.8389362395623406e-05,
"loss": 0.9477,
"step": 511
},
{
"epoch": 2.8074023303632627,
"grad_norm": 0.23339126446276168,
"learning_rate": 3.8236063518938405e-05,
"loss": 0.9401,
"step": 512
},
{
"epoch": 2.8128855380397533,
"grad_norm": 0.2658559032559636,
"learning_rate": 3.8082790596854075e-05,
"loss": 0.9322,
"step": 513
},
{
"epoch": 2.818368745716244,
"grad_norm": 0.28632019300770967,
"learning_rate": 3.792954588463162e-05,
"loss": 0.9647,
"step": 514
},
{
"epoch": 2.823851953392735,
"grad_norm": 0.2585763044971737,
"learning_rate": 3.777633163711716e-05,
"loss": 0.9505,
"step": 515
},
{
"epoch": 2.8293351610692254,
"grad_norm": 0.2742118891600389,
"learning_rate": 3.7623150108708546e-05,
"loss": 0.9446,
"step": 516
},
{
"epoch": 2.834818368745716,
"grad_norm": 0.2344118819440086,
"learning_rate": 3.7470003553322186e-05,
"loss": 0.9309,
"step": 517
},
{
"epoch": 2.840301576422207,
"grad_norm": 0.5837521053187771,
"learning_rate": 3.7316894224359904e-05,
"loss": 0.9394,
"step": 518
},
{
"epoch": 2.8457847840986976,
"grad_norm": 0.31498022974880685,
"learning_rate": 3.71638243746758e-05,
"loss": 0.9321,
"step": 519
},
{
"epoch": 2.8512679917751886,
"grad_norm": 0.5111390953286222,
"learning_rate": 3.7010796256543034e-05,
"loss": 0.9565,
"step": 520
},
{
"epoch": 2.856751199451679,
"grad_norm": 0.24452242688766815,
"learning_rate": 3.6857812121620756e-05,
"loss": 0.938,
"step": 521
},
{
"epoch": 2.8622344071281702,
"grad_norm": 0.263705639317996,
"learning_rate": 3.670487422092092e-05,
"loss": 0.9434,
"step": 522
},
{
"epoch": 2.867717614804661,
"grad_norm": 0.25074905428863853,
"learning_rate": 3.655198480477523e-05,
"loss": 0.9514,
"step": 523
},
{
"epoch": 2.8732008224811514,
"grad_norm": 0.23134392999771497,
"learning_rate": 3.639914612280194e-05,
"loss": 0.9437,
"step": 524
},
{
"epoch": 2.8786840301576424,
"grad_norm": 0.34358764407986175,
"learning_rate": 3.6246360423872834e-05,
"loss": 0.9518,
"step": 525
},
{
"epoch": 2.884167237834133,
"grad_norm": 0.21629316436514154,
"learning_rate": 3.609362995608008e-05,
"loss": 0.9572,
"step": 526
},
{
"epoch": 2.8896504455106236,
"grad_norm": 0.22317832298096357,
"learning_rate": 3.59409569667032e-05,
"loss": 0.9492,
"step": 527
},
{
"epoch": 2.8951336531871146,
"grad_norm": 0.20759859863486543,
"learning_rate": 3.578834370217595e-05,
"loss": 0.9355,
"step": 528
},
{
"epoch": 2.900616860863605,
"grad_norm": 0.2138741670103374,
"learning_rate": 3.5635792408053304e-05,
"loss": 0.9526,
"step": 529
},
{
"epoch": 2.9061000685400957,
"grad_norm": 0.2071241548771815,
"learning_rate": 3.54833053289784e-05,
"loss": 0.9459,
"step": 530
},
{
"epoch": 2.9115832762165867,
"grad_norm": 0.24989924945583794,
"learning_rate": 3.533088470864953e-05,
"loss": 0.9402,
"step": 531
},
{
"epoch": 2.9170664838930773,
"grad_norm": 0.27509189913976284,
"learning_rate": 3.517853278978708e-05,
"loss": 0.9554,
"step": 532
},
{
"epoch": 2.9225496915695683,
"grad_norm": 0.23063790000408171,
"learning_rate": 3.5026251814100604e-05,
"loss": 0.9457,
"step": 533
},
{
"epoch": 2.928032899246059,
"grad_norm": 0.2166555423682174,
"learning_rate": 3.487404402225574e-05,
"loss": 0.9272,
"step": 534
},
{
"epoch": 2.93351610692255,
"grad_norm": 0.21899933270897742,
"learning_rate": 3.4721911653841355e-05,
"loss": 0.9507,
"step": 535
},
{
"epoch": 2.9389993145990405,
"grad_norm": 0.19354444875830706,
"learning_rate": 3.45698569473365e-05,
"loss": 0.9682,
"step": 536
},
{
"epoch": 2.944482522275531,
"grad_norm": 0.17844494801443178,
"learning_rate": 3.44178821400775e-05,
"loss": 0.9473,
"step": 537
},
{
"epoch": 2.949965729952022,
"grad_norm": 1.3083812330864948,
"learning_rate": 3.426598946822507e-05,
"loss": 0.9461,
"step": 538
},
{
"epoch": 2.9554489376285127,
"grad_norm": 0.21349382766535052,
"learning_rate": 3.4114181166731355e-05,
"loss": 0.9238,
"step": 539
},
{
"epoch": 2.9609321453050033,
"grad_norm": 0.2740853283765663,
"learning_rate": 3.39624594693071e-05,
"loss": 0.9483,
"step": 540
},
{
"epoch": 2.9664153529814943,
"grad_norm": 0.2604547593626456,
"learning_rate": 3.381082660838875e-05,
"loss": 0.9566,
"step": 541
},
{
"epoch": 2.971898560657985,
"grad_norm": 0.2636302620609088,
"learning_rate": 3.365928481510558e-05,
"loss": 0.9529,
"step": 542
},
{
"epoch": 2.9773817683344754,
"grad_norm": 0.24591140742789205,
"learning_rate": 3.3507836319246924e-05,
"loss": 0.9327,
"step": 543
},
{
"epoch": 2.9828649760109665,
"grad_norm": 0.23336910029530064,
"learning_rate": 3.33564833492293e-05,
"loss": 0.9606,
"step": 544
},
{
"epoch": 2.988348183687457,
"grad_norm": 0.215970712690598,
"learning_rate": 3.3205228132063714e-05,
"loss": 0.9495,
"step": 545
},
{
"epoch": 2.993831391363948,
"grad_norm": 0.23530537682003938,
"learning_rate": 3.305407289332279e-05,
"loss": 0.9533,
"step": 546
},
{
"epoch": 2.9993145990404386,
"grad_norm": 0.35138852092483364,
"learning_rate": 3.290301985710807e-05,
"loss": 1.3456,
"step": 547
},
{
"epoch": 3.004797806716929,
"grad_norm": 0.5481962224464548,
"learning_rate": 3.27520712460173e-05,
"loss": 1.1027,
"step": 548
},
{
"epoch": 3.01028101439342,
"grad_norm": 0.7662140587544488,
"learning_rate": 3.260122928111172e-05,
"loss": 0.8979,
"step": 549
},
{
"epoch": 3.015764222069911,
"grad_norm": 0.9454349543994065,
"learning_rate": 3.245049618188334e-05,
"loss": 0.9039,
"step": 550
},
{
"epoch": 3.021247429746402,
"grad_norm": 0.7559182562175768,
"learning_rate": 3.229987416622235e-05,
"loss": 0.9046,
"step": 551
},
{
"epoch": 3.0267306374228924,
"grad_norm": 0.6019789035769177,
"learning_rate": 3.2149365450384445e-05,
"loss": 0.8982,
"step": 552
},
{
"epoch": 3.032213845099383,
"grad_norm": 0.608624397941904,
"learning_rate": 3.199897224895823e-05,
"loss": 0.9024,
"step": 553
},
{
"epoch": 3.037697052775874,
"grad_norm": 1.3331460907440373,
"learning_rate": 3.184869677483261e-05,
"loss": 0.9183,
"step": 554
},
{
"epoch": 3.0431802604523646,
"grad_norm": 0.5617254798828216,
"learning_rate": 3.169854123916426e-05,
"loss": 0.9044,
"step": 555
},
{
"epoch": 3.0486634681288556,
"grad_norm": 0.6117567607126662,
"learning_rate": 3.1548507851345094e-05,
"loss": 0.9084,
"step": 556
},
{
"epoch": 3.054146675805346,
"grad_norm": 0.5486288335887587,
"learning_rate": 3.139859881896971e-05,
"loss": 0.9086,
"step": 557
},
{
"epoch": 3.0596298834818367,
"grad_norm": 0.43883951251804787,
"learning_rate": 3.124881634780295e-05,
"loss": 0.8822,
"step": 558
},
{
"epoch": 3.0651130911583278,
"grad_norm": 0.375243221483072,
"learning_rate": 3.109916264174743e-05,
"loss": 0.9089,
"step": 559
},
{
"epoch": 3.0705962988348183,
"grad_norm": 0.45192811672936717,
"learning_rate": 3.094963990281112e-05,
"loss": 0.8979,
"step": 560
},
{
"epoch": 3.076079506511309,
"grad_norm": 0.3691771282212795,
"learning_rate": 3.080025033107494e-05,
"loss": 0.8973,
"step": 561
},
{
"epoch": 3.0815627141878,
"grad_norm": 0.28371809189511954,
"learning_rate": 3.065099612466037e-05,
"loss": 0.887,
"step": 562
},
{
"epoch": 3.0870459218642905,
"grad_norm": 0.33397747859909344,
"learning_rate": 3.0501879479697112e-05,
"loss": 0.9324,
"step": 563
},
{
"epoch": 3.0925291295407815,
"grad_norm": 8.830372813312158,
"learning_rate": 3.035290259029083e-05,
"loss": 0.9137,
"step": 564
},
{
"epoch": 3.098012337217272,
"grad_norm": 0.6793506372960832,
"learning_rate": 3.0204067648490766e-05,
"loss": 0.9178,
"step": 565
},
{
"epoch": 3.1034955448937627,
"grad_norm": 0.8864777488999107,
"learning_rate": 3.00553768442576e-05,
"loss": 0.8926,
"step": 566
},
{
"epoch": 3.1089787525702537,
"grad_norm": 0.6288925135464869,
"learning_rate": 2.9906832365431132e-05,
"loss": 0.9198,
"step": 567
},
{
"epoch": 3.1144619602467443,
"grad_norm": 0.40885944277256037,
"learning_rate": 2.9758436397698118e-05,
"loss": 0.9127,
"step": 568
},
{
"epoch": 3.1199451679232353,
"grad_norm": 0.7284002718913494,
"learning_rate": 2.961019112456014e-05,
"loss": 0.882,
"step": 569
},
{
"epoch": 3.125428375599726,
"grad_norm": 0.5837455202754323,
"learning_rate": 2.946209872730145e-05,
"loss": 0.9078,
"step": 570
},
{
"epoch": 3.1309115832762164,
"grad_norm": 0.33186964600527347,
"learning_rate": 2.931416138495689e-05,
"loss": 0.8925,
"step": 571
},
{
"epoch": 3.1363947909527075,
"grad_norm": 0.53882269291948,
"learning_rate": 2.9166381274279803e-05,
"loss": 0.9042,
"step": 572
},
{
"epoch": 3.141877998629198,
"grad_norm": 0.46331612230401065,
"learning_rate": 2.901876056971002e-05,
"loss": 0.9217,
"step": 573
},
{
"epoch": 3.147361206305689,
"grad_norm": 0.31771843320448484,
"learning_rate": 2.88713014433419e-05,
"loss": 0.9018,
"step": 574
},
{
"epoch": 3.1528444139821796,
"grad_norm": 0.49472378335259276,
"learning_rate": 2.8724006064892296e-05,
"loss": 0.8748,
"step": 575
},
{
"epoch": 3.15832762165867,
"grad_norm": 0.3530540111863458,
"learning_rate": 2.85768766016687e-05,
"loss": 0.8957,
"step": 576
},
{
"epoch": 3.1638108293351612,
"grad_norm": 0.3023194345015207,
"learning_rate": 2.8429915218537297e-05,
"loss": 0.9023,
"step": 577
},
{
"epoch": 3.169294037011652,
"grad_norm": 0.39598120224098793,
"learning_rate": 2.8283124077891167e-05,
"loss": 0.8928,
"step": 578
},
{
"epoch": 3.1747772446881424,
"grad_norm": 0.2762440885300842,
"learning_rate": 2.813650533961843e-05,
"loss": 0.8986,
"step": 579
},
{
"epoch": 3.1802604523646334,
"grad_norm": 0.2731980966351029,
"learning_rate": 2.7990061161070445e-05,
"loss": 0.9012,
"step": 580
},
{
"epoch": 3.185743660041124,
"grad_norm": 0.41348684607047675,
"learning_rate": 2.7843793697030128e-05,
"loss": 0.9069,
"step": 581
},
{
"epoch": 3.191226867717615,
"grad_norm": 0.25162826940534694,
"learning_rate": 2.7697705099680217e-05,
"loss": 0.9061,
"step": 582
},
{
"epoch": 3.1967100753941056,
"grad_norm": 0.2506514671680193,
"learning_rate": 2.7551797518571573e-05,
"loss": 0.8851,
"step": 583
},
{
"epoch": 3.202193283070596,
"grad_norm": 0.2659325922311813,
"learning_rate": 2.7406073100591605e-05,
"loss": 0.8999,
"step": 584
},
{
"epoch": 3.207676490747087,
"grad_norm": 0.22833189604550308,
"learning_rate": 2.7260533989932628e-05,
"loss": 0.8804,
"step": 585
},
{
"epoch": 3.2131596984235777,
"grad_norm": 0.26002377936612986,
"learning_rate": 2.7115182328060385e-05,
"loss": 0.8987,
"step": 586
},
{
"epoch": 3.2186429061000688,
"grad_norm": 0.24416881973039506,
"learning_rate": 2.697002025368245e-05,
"loss": 0.8982,
"step": 587
},
{
"epoch": 3.2241261137765593,
"grad_norm": 0.22174095398609206,
"learning_rate": 2.682504990271682e-05,
"loss": 0.8962,
"step": 588
},
{
"epoch": 3.22960932145305,
"grad_norm": 0.2715414538357121,
"learning_rate": 2.668027340826048e-05,
"loss": 0.8896,
"step": 589
},
{
"epoch": 3.235092529129541,
"grad_norm": 0.2765507918752038,
"learning_rate": 2.653569290055799e-05,
"loss": 0.8938,
"step": 590
},
{
"epoch": 3.2405757368060315,
"grad_norm": 0.20005217231999928,
"learning_rate": 2.6391310506970147e-05,
"loss": 0.8827,
"step": 591
},
{
"epoch": 3.246058944482522,
"grad_norm": 0.8704396300357945,
"learning_rate": 2.6247128351942726e-05,
"loss": 0.9075,
"step": 592
},
{
"epoch": 3.251542152159013,
"grad_norm": 0.3586781432686027,
"learning_rate": 2.6103148556975173e-05,
"loss": 0.8919,
"step": 593
},
{
"epoch": 3.2570253598355037,
"grad_norm": 0.3879500657373189,
"learning_rate": 2.5959373240589382e-05,
"loss": 0.906,
"step": 594
},
{
"epoch": 3.2625085675119947,
"grad_norm": 0.3206624742458832,
"learning_rate": 2.5815804518298575e-05,
"loss": 0.9046,
"step": 595
},
{
"epoch": 3.2679917751884853,
"grad_norm": 0.5655322768937269,
"learning_rate": 2.5672444502576122e-05,
"loss": 0.8792,
"step": 596
},
{
"epoch": 3.273474982864976,
"grad_norm": 0.26423994402378853,
"learning_rate": 2.55292953028245e-05,
"loss": 0.9029,
"step": 597
},
{
"epoch": 3.278958190541467,
"grad_norm": 0.3568680800314573,
"learning_rate": 2.53863590253442e-05,
"loss": 0.9097,
"step": 598
},
{
"epoch": 3.2844413982179574,
"grad_norm": 0.2876917661506257,
"learning_rate": 2.5243637773302817e-05,
"loss": 0.9039,
"step": 599
},
{
"epoch": 3.2899246058944485,
"grad_norm": 0.2543858700320745,
"learning_rate": 2.510113364670403e-05,
"loss": 0.8974,
"step": 600
},
{
"epoch": 3.295407813570939,
"grad_norm": 0.29660460583308934,
"learning_rate": 2.4958848742356724e-05,
"loss": 0.9085,
"step": 601
},
{
"epoch": 3.3008910212474296,
"grad_norm": 0.21363483198577174,
"learning_rate": 2.481678515384415e-05,
"loss": 0.9022,
"step": 602
},
{
"epoch": 3.3063742289239206,
"grad_norm": 0.2406946488569973,
"learning_rate": 2.4674944971493123e-05,
"loss": 0.9017,
"step": 603
},
{
"epoch": 3.311857436600411,
"grad_norm": 0.25257504290451166,
"learning_rate": 2.453333028234325e-05,
"loss": 0.913,
"step": 604
},
{
"epoch": 3.317340644276902,
"grad_norm": 0.17923139676357774,
"learning_rate": 2.439194317011622e-05,
"loss": 0.9097,
"step": 605
},
{
"epoch": 3.322823851953393,
"grad_norm": 0.2282134057524544,
"learning_rate": 2.4250785715185138e-05,
"loss": 0.8902,
"step": 606
},
{
"epoch": 3.3283070596298834,
"grad_norm": 0.19939767203864542,
"learning_rate": 2.410985999454396e-05,
"loss": 0.8929,
"step": 607
},
{
"epoch": 3.3337902673063744,
"grad_norm": 0.1962097138596388,
"learning_rate": 2.3969168081776867e-05,
"loss": 0.8816,
"step": 608
},
{
"epoch": 3.339273474982865,
"grad_norm": 0.2408654729001506,
"learning_rate": 2.382871204702781e-05,
"loss": 0.8904,
"step": 609
},
{
"epoch": 3.3447566826593556,
"grad_norm": 0.1857502171735821,
"learning_rate": 2.3688493956969997e-05,
"loss": 0.9253,
"step": 610
},
{
"epoch": 3.3502398903358466,
"grad_norm": 0.1967645774870265,
"learning_rate": 2.3548515874775547e-05,
"loss": 0.8996,
"step": 611
},
{
"epoch": 3.355723098012337,
"grad_norm": 0.19126170344434232,
"learning_rate": 2.340877986008507e-05,
"loss": 0.9036,
"step": 612
},
{
"epoch": 3.361206305688828,
"grad_norm": 0.17261773740991576,
"learning_rate": 2.3269287968977406e-05,
"loss": 0.8974,
"step": 613
},
{
"epoch": 3.3666895133653187,
"grad_norm": 0.1828461955951697,
"learning_rate": 2.3130042253939334e-05,
"loss": 0.9053,
"step": 614
},
{
"epoch": 3.3721727210418093,
"grad_norm": 0.1787029837346015,
"learning_rate": 2.2991044763835438e-05,
"loss": 0.8796,
"step": 615
},
{
"epoch": 3.3776559287183003,
"grad_norm": 0.1782795515564827,
"learning_rate": 2.285229754387783e-05,
"loss": 0.9047,
"step": 616
},
{
"epoch": 3.383139136394791,
"grad_norm": 0.1808046435484797,
"learning_rate": 2.2713802635596246e-05,
"loss": 0.9014,
"step": 617
},
{
"epoch": 3.3886223440712815,
"grad_norm": 0.16986006910284632,
"learning_rate": 2.2575562076807857e-05,
"loss": 0.9182,
"step": 618
},
{
"epoch": 3.3941055517477725,
"grad_norm": 0.17065356826960065,
"learning_rate": 2.2437577901587284e-05,
"loss": 0.9148,
"step": 619
},
{
"epoch": 3.399588759424263,
"grad_norm": 0.1821028903512155,
"learning_rate": 2.22998521402368e-05,
"loss": 0.892,
"step": 620
},
{
"epoch": 3.405071967100754,
"grad_norm": 0.16756736884692283,
"learning_rate": 2.216238681925628e-05,
"loss": 0.9055,
"step": 621
},
{
"epoch": 3.4105551747772447,
"grad_norm": 0.19298599105907382,
"learning_rate": 2.2025183961313542e-05,
"loss": 0.905,
"step": 622
},
{
"epoch": 3.4160383824537353,
"grad_norm": 0.17599992995668898,
"learning_rate": 2.188824558521452e-05,
"loss": 0.9076,
"step": 623
},
{
"epoch": 3.4215215901302263,
"grad_norm": 0.17170320698537844,
"learning_rate": 2.175157370587348e-05,
"loss": 0.9036,
"step": 624
},
{
"epoch": 3.427004797806717,
"grad_norm": 0.19564360580394577,
"learning_rate": 2.1615170334283535e-05,
"loss": 0.8988,
"step": 625
},
{
"epoch": 3.432488005483208,
"grad_norm": 0.15688766150962508,
"learning_rate": 2.1479037477486936e-05,
"loss": 0.8866,
"step": 626
},
{
"epoch": 3.4379712131596984,
"grad_norm": 0.1814220481133191,
"learning_rate": 2.1343177138545547e-05,
"loss": 0.8957,
"step": 627
},
{
"epoch": 3.443454420836189,
"grad_norm": 0.19751191092231474,
"learning_rate": 2.1207591316511454e-05,
"loss": 0.8952,
"step": 628
},
{
"epoch": 3.44893762851268,
"grad_norm": 0.1646360237105222,
"learning_rate": 2.1072282006397425e-05,
"loss": 0.8953,
"step": 629
},
{
"epoch": 3.4544208361891706,
"grad_norm": 0.18843073086840514,
"learning_rate": 2.0937251199147684e-05,
"loss": 0.9038,
"step": 630
},
{
"epoch": 3.459904043865661,
"grad_norm": 0.17451033699060078,
"learning_rate": 2.0802500881608557e-05,
"loss": 0.9059,
"step": 631
},
{
"epoch": 3.465387251542152,
"grad_norm": 0.15864010191777436,
"learning_rate": 2.066803303649918e-05,
"loss": 0.8736,
"step": 632
},
{
"epoch": 3.470870459218643,
"grad_norm": 0.1745241432795982,
"learning_rate": 2.0533849642382446e-05,
"loss": 0.8966,
"step": 633
},
{
"epoch": 3.476353666895134,
"grad_norm": 0.15563829064977594,
"learning_rate": 2.039995267363581e-05,
"loss": 0.9013,
"step": 634
},
{
"epoch": 3.4818368745716244,
"grad_norm": 0.18758674942103779,
"learning_rate": 2.026634410042223e-05,
"loss": 0.8947,
"step": 635
},
{
"epoch": 3.487320082248115,
"grad_norm": 0.15887193712810688,
"learning_rate": 2.0133025888661263e-05,
"loss": 0.9054,
"step": 636
},
{
"epoch": 3.492803289924606,
"grad_norm": 0.17452023322549837,
"learning_rate": 2.0000000000000012e-05,
"loss": 0.9076,
"step": 637
},
{
"epoch": 3.4982864976010966,
"grad_norm": 0.18330071982251422,
"learning_rate": 1.986726839178438e-05,
"loss": 0.9015,
"step": 638
},
{
"epoch": 3.5037697052775876,
"grad_norm": 0.1575715551896094,
"learning_rate": 1.9734833017030227e-05,
"loss": 0.8813,
"step": 639
},
{
"epoch": 3.509252912954078,
"grad_norm": 0.18878562836114246,
"learning_rate": 1.9602695824394576e-05,
"loss": 0.9005,
"step": 640
},
{
"epoch": 3.5147361206305687,
"grad_norm": 0.15533387161156822,
"learning_rate": 1.9470858758147036e-05,
"loss": 0.8757,
"step": 641
},
{
"epoch": 3.5202193283070597,
"grad_norm": 0.1630547237218842,
"learning_rate": 1.933932375814114e-05,
"loss": 0.9038,
"step": 642
},
{
"epoch": 3.5257025359835503,
"grad_norm": 0.1712109811813745,
"learning_rate": 1.9208092759785818e-05,
"loss": 0.9049,
"step": 643
},
{
"epoch": 3.531185743660041,
"grad_norm": 0.14587673483344632,
"learning_rate": 1.9077167694016903e-05,
"loss": 0.8985,
"step": 644
},
{
"epoch": 3.536668951336532,
"grad_norm": 0.16414938500317564,
"learning_rate": 1.8946550487268706e-05,
"loss": 0.9068,
"step": 645
},
{
"epoch": 3.5421521590130225,
"grad_norm": 0.15376745325776353,
"learning_rate": 1.8816243061445734e-05,
"loss": 0.8955,
"step": 646
},
{
"epoch": 3.5476353666895135,
"grad_norm": 0.15960811991170182,
"learning_rate": 1.8686247333894366e-05,
"loss": 0.8967,
"step": 647
},
{
"epoch": 3.553118574366004,
"grad_norm": 0.15855046926600352,
"learning_rate": 1.8556565217374606e-05,
"loss": 0.8951,
"step": 648
},
{
"epoch": 3.558601782042495,
"grad_norm": 0.15624502487015215,
"learning_rate": 1.8427198620032037e-05,
"loss": 0.9061,
"step": 649
},
{
"epoch": 3.5640849897189857,
"grad_norm": 0.1679249396514824,
"learning_rate": 1.829814944536963e-05,
"loss": 0.9094,
"step": 650
},
{
"epoch": 3.5695681973954763,
"grad_norm": 0.15999412608591182,
"learning_rate": 1.8169419592219813e-05,
"loss": 0.8991,
"step": 651
},
{
"epoch": 3.5750514050719673,
"grad_norm": 0.17340179382194568,
"learning_rate": 1.8041010954716544e-05,
"loss": 0.9103,
"step": 652
},
{
"epoch": 3.580534612748458,
"grad_norm": 0.15237872236931496,
"learning_rate": 1.7912925422267345e-05,
"loss": 0.8999,
"step": 653
},
{
"epoch": 3.5860178204249484,
"grad_norm": 1.2272413852202702,
"learning_rate": 1.7785164879525604e-05,
"loss": 0.9349,
"step": 654
},
{
"epoch": 3.5915010281014395,
"grad_norm": 0.17476206012460227,
"learning_rate": 1.7657731206362813e-05,
"loss": 0.9132,
"step": 655
},
{
"epoch": 3.59698423577793,
"grad_norm": 0.1762962782430207,
"learning_rate": 1.7530626277840846e-05,
"loss": 0.8938,
"step": 656
},
{
"epoch": 3.6024674434544206,
"grad_norm": 0.181015770218541,
"learning_rate": 1.7403851964184486e-05,
"loss": 0.9069,
"step": 657
},
{
"epoch": 3.6079506511309116,
"grad_norm": 0.16366717982789378,
"learning_rate": 1.7277410130753775e-05,
"loss": 0.8954,
"step": 658
},
{
"epoch": 3.613433858807402,
"grad_norm": 0.1612334600367671,
"learning_rate": 1.7151302638016683e-05,
"loss": 0.884,
"step": 659
},
{
"epoch": 3.618917066483893,
"grad_norm": 0.16685277983540153,
"learning_rate": 1.7025531341521685e-05,
"loss": 0.894,
"step": 660
},
{
"epoch": 3.624400274160384,
"grad_norm": 0.1669827992942319,
"learning_rate": 1.690009809187041e-05,
"loss": 0.8952,
"step": 661
},
{
"epoch": 3.629883481836875,
"grad_norm": 0.15253380478976128,
"learning_rate": 1.6775004734690495e-05,
"loss": 0.8864,
"step": 662
},
{
"epoch": 3.6353666895133654,
"grad_norm": 0.15609996007733176,
"learning_rate": 1.6650253110608415e-05,
"loss": 0.9006,
"step": 663
},
{
"epoch": 3.640849897189856,
"grad_norm": 0.1461455903080232,
"learning_rate": 1.6525845055222306e-05,
"loss": 0.917,
"step": 664
},
{
"epoch": 3.646333104866347,
"grad_norm": 0.15716311122738527,
"learning_rate": 1.6401782399075098e-05,
"loss": 0.9149,
"step": 665
},
{
"epoch": 3.6518163125428376,
"grad_norm": 0.32627646467352905,
"learning_rate": 1.627806696762745e-05,
"loss": 0.9054,
"step": 666
},
{
"epoch": 3.657299520219328,
"grad_norm": 0.16854342857121438,
"learning_rate": 1.615470058123099e-05,
"loss": 0.8805,
"step": 667
},
{
"epoch": 3.662782727895819,
"grad_norm": 0.1747226962814483,
"learning_rate": 1.603168505510148e-05,
"loss": 0.9052,
"step": 668
},
{
"epoch": 3.6682659355723097,
"grad_norm": 0.16903191589677927,
"learning_rate": 1.5909022199292104e-05,
"loss": 0.8961,
"step": 669
},
{
"epoch": 3.6737491432488003,
"grad_norm": 8.87107113952348,
"learning_rate": 1.5786713818666876e-05,
"loss": 0.9752,
"step": 670
},
{
"epoch": 3.6792323509252913,
"grad_norm": 0.20674605140421184,
"learning_rate": 1.566476171287401e-05,
"loss": 0.8898,
"step": 671
},
{
"epoch": 3.684715558601782,
"grad_norm": 0.19629370706239815,
"learning_rate": 1.554316767631951e-05,
"loss": 0.9059,
"step": 672
},
{
"epoch": 3.690198766278273,
"grad_norm": 0.21259237974634135,
"learning_rate": 1.5421933498140763e-05,
"loss": 0.8958,
"step": 673
},
{
"epoch": 3.6956819739547635,
"grad_norm": 0.18310426924162343,
"learning_rate": 1.5301060962180133e-05,
"loss": 0.8853,
"step": 674
},
{
"epoch": 3.7011651816312545,
"grad_norm": 0.1751171847378099,
"learning_rate": 1.518055184695884e-05,
"loss": 0.8813,
"step": 675
},
{
"epoch": 3.706648389307745,
"grad_norm": 0.16886538069007115,
"learning_rate": 1.5060407925650662e-05,
"loss": 0.8881,
"step": 676
},
{
"epoch": 3.7121315969842357,
"grad_norm": 0.17046906947879148,
"learning_rate": 1.494063096605595e-05,
"loss": 0.8937,
"step": 677
},
{
"epoch": 3.7176148046607267,
"grad_norm": 0.1581379052592717,
"learning_rate": 1.4821222730575561e-05,
"loss": 0.8972,
"step": 678
},
{ |
|
"epoch": 3.7230980123372173, |
|
"grad_norm": 0.6969777085406519, |
|
"learning_rate": 1.4702184976184915e-05, |
|
"loss": 0.9041, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 3.728581220013708, |
|
"grad_norm": 0.2182887056206488, |
|
"learning_rate": 1.4583519454408191e-05, |
|
"loss": 0.9039, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.734064427690199, |
|
"grad_norm": 0.22945666556054492, |
|
"learning_rate": 1.4465227911292537e-05, |
|
"loss": 0.8887, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 3.7395476353666894, |
|
"grad_norm": 0.22736819225186297, |
|
"learning_rate": 1.434731208738232e-05, |
|
"loss": 0.9121, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 3.74503084304318, |
|
"grad_norm": 0.20165874554977742, |
|
"learning_rate": 1.4229773717693625e-05, |
|
"loss": 0.8965, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 3.750514050719671, |
|
"grad_norm": 0.184903868108441, |
|
"learning_rate": 1.4112614531688645e-05, |
|
"loss": 0.9045, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 3.7559972583961616, |
|
"grad_norm": 0.18326386101695807, |
|
"learning_rate": 1.3995836253250233e-05, |
|
"loss": 0.8995, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.7614804660726526, |
|
"grad_norm": 0.20107358682198542, |
|
"learning_rate": 1.3879440600656607e-05, |
|
"loss": 0.9072, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 3.766963673749143, |
|
"grad_norm": 0.3569275389027143, |
|
"learning_rate": 1.3763429286555963e-05, |
|
"loss": 0.9048, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 3.7724468814256342, |
|
"grad_norm": 0.19199246127012237, |
|
"learning_rate": 1.3647804017941373e-05, |
|
"loss": 0.8941, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 3.777930089102125, |
|
"grad_norm": 0.19373559370826268, |
|
"learning_rate": 1.3532566496125634e-05, |
|
"loss": 0.9014, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 3.7834132967786154, |
|
"grad_norm": 0.180911982737044, |
|
"learning_rate": 1.3417718416716183e-05, |
|
"loss": 0.8911, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.7888965044551064, |
|
"grad_norm": 0.17759995896183428, |
|
"learning_rate": 1.3303261469590228e-05, |
|
"loss": 0.8932, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 3.794379712131597, |
|
"grad_norm": 0.159075207881893, |
|
"learning_rate": 1.3189197338869853e-05, |
|
"loss": 0.882, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 3.7998629198080875, |
|
"grad_norm": 0.18045574791107008, |
|
"learning_rate": 1.3075527702897185e-05, |
|
"loss": 0.9228, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 3.8053461274845786, |
|
"grad_norm": 0.16316813300832433, |
|
"learning_rate": 1.2962254234209826e-05, |
|
"loss": 0.8894, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 3.810829335161069, |
|
"grad_norm": 0.17660313697972807, |
|
"learning_rate": 1.2849378599516085e-05, |
|
"loss": 0.8997, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.8163125428375597, |
|
"grad_norm": 0.1679766974660261, |
|
"learning_rate": 1.273690245967059e-05, |
|
"loss": 0.9042, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 3.8217957505140507, |
|
"grad_norm": 0.15705812295975527, |
|
"learning_rate": 1.2624827469649739e-05, |
|
"loss": 0.8959, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 3.8272789581905413, |
|
"grad_norm": 0.1600279121047265, |
|
"learning_rate": 1.2513155278527446e-05, |
|
"loss": 0.8942, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 3.8327621658670323, |
|
"grad_norm": 0.14891745897684322, |
|
"learning_rate": 1.240188752945084e-05, |
|
"loss": 0.9133, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 3.838245373543523, |
|
"grad_norm": 1.0164234645051264, |
|
"learning_rate": 1.2291025859616026e-05, |
|
"loss": 0.8963, |
|
"step": 700 |
|
}, |
|
    {
      "epoch": 3.843728581220014,
      "grad_norm": 0.15287661592525834,
      "learning_rate": 1.21805719002441e-05,
      "loss": 0.896,
      "step": 701
    },
    {
      "epoch": 3.8492117888965045,
      "grad_norm": 0.18515627168326385,
      "learning_rate": 1.2070527276557092e-05,
      "loss": 0.8907,
      "step": 702
    },
    {
      "epoch": 3.854694996572995,
      "grad_norm": 0.19538289491293945,
      "learning_rate": 1.1960893607754022e-05,
      "loss": 0.8866,
      "step": 703
    },
    {
      "epoch": 3.860178204249486,
      "grad_norm": 0.1803420043271255,
      "learning_rate": 1.1851672506987165e-05,
      "loss": 0.9027,
      "step": 704
    },
    {
      "epoch": 3.8656614119259767,
      "grad_norm": 0.1728624061069401,
      "learning_rate": 1.17428655813382e-05,
      "loss": 0.9088,
      "step": 705
    },
    {
      "epoch": 3.8711446196024673,
      "grad_norm": 0.18740842748319184,
      "learning_rate": 1.1634474431794676e-05,
      "loss": 0.9012,
      "step": 706
    },
    {
      "epoch": 3.8766278272789583,
      "grad_norm": 0.16138769768933767,
      "learning_rate": 1.1526500653226385e-05,
      "loss": 0.9168,
      "step": 707
    },
    {
      "epoch": 3.882111034955449,
      "grad_norm": 0.14601780097035175,
      "learning_rate": 1.141894583436189e-05,
      "loss": 0.8923,
      "step": 708
    },
    {
      "epoch": 3.8875942426319394,
      "grad_norm": 0.16268490177886352,
      "learning_rate": 1.1311811557765208e-05,
      "loss": 0.9011,
      "step": 709
    },
    {
      "epoch": 3.8930774503084304,
      "grad_norm": 0.14407863627045317,
      "learning_rate": 1.1205099399812478e-05,
      "loss": 0.8937,
      "step": 710
    },
    {
      "epoch": 3.898560657984921,
      "grad_norm": 0.14837780514070958,
      "learning_rate": 1.1098810930668754e-05,
      "loss": 0.9035,
      "step": 711
    },
    {
      "epoch": 3.904043865661412,
      "grad_norm": 0.13890190728316182,
      "learning_rate": 1.0992947714264952e-05,
      "loss": 0.8925,
      "step": 712
    },
    {
      "epoch": 3.9095270733379026,
      "grad_norm": 0.1482520122684546,
      "learning_rate": 1.088751130827478e-05,
      "loss": 0.8864,
      "step": 713
    },
    {
      "epoch": 3.9150102810143936,
      "grad_norm": 0.19175152200646928,
      "learning_rate": 1.078250326409188e-05,
      "loss": 0.9022,
      "step": 714
    },
    {
      "epoch": 3.920493488690884,
      "grad_norm": 0.14176488398671347,
      "learning_rate": 1.0677925126806956e-05,
      "loss": 0.9017,
      "step": 715
    },
    {
      "epoch": 3.925976696367375,
      "grad_norm": 0.14891595552628026,
      "learning_rate": 1.0573778435185039e-05,
      "loss": 0.9038,
      "step": 716
    },
    {
      "epoch": 3.931459904043866,
      "grad_norm": 0.13751484111663212,
      "learning_rate": 1.047006472164287e-05,
      "loss": 0.8925,
      "step": 717
    },
    {
      "epoch": 3.9369431117203564,
      "grad_norm": 0.1431600770457188,
      "learning_rate": 1.0366785512226359e-05,
      "loss": 0.903,
      "step": 718
    },
    {
      "epoch": 3.942426319396847,
      "grad_norm": 0.7396147350976154,
      "learning_rate": 1.0263942326588054e-05,
      "loss": 0.9243,
      "step": 719
    },
    {
      "epoch": 3.947909527073338,
      "grad_norm": 0.18213078561266646,
      "learning_rate": 1.0161536677964933e-05,
      "loss": 0.8895,
      "step": 720
    },
    {
      "epoch": 3.9533927347498286,
      "grad_norm": 0.19698575071196872,
      "learning_rate": 1.0059570073155953e-05,
      "loss": 0.8988,
      "step": 721
    },
    {
      "epoch": 3.958875942426319,
      "grad_norm": 0.16625311791486364,
      "learning_rate": 9.958044012500023e-06,
      "loss": 0.8841,
      "step": 722
    },
    {
      "epoch": 3.96435915010281,
      "grad_norm": 0.18406574667380657,
      "learning_rate": 9.856959989853876e-06,
      "loss": 0.8959,
      "step": 723
    },
    {
      "epoch": 3.969842357779301,
      "grad_norm": 0.16041228708375482,
      "learning_rate": 9.75631949257004e-06,
      "loss": 0.8885,
      "step": 724
    },
    {
      "epoch": 3.9753255654557917,
      "grad_norm": 0.16790760471515281,
      "learning_rate": 9.656124001475068e-06,
      "loss": 0.8832,
      "step": 725
    },
    {
      "epoch": 3.9808087731322823,
      "grad_norm": 0.1643989573541531,
      "learning_rate": 9.556374990847618e-06,
      "loss": 0.9077,
      "step": 726
    },
    {
      "epoch": 3.9862919808087733,
      "grad_norm": 0.15089157844878578,
      "learning_rate": 9.457073928396871e-06,
      "loss": 0.8924,
      "step": 727
    },
    {
      "epoch": 3.991775188485264,
      "grad_norm": 0.15938422497231847,
      "learning_rate": 9.358222275240884e-06,
      "loss": 0.891,
      "step": 728
    },
    {
      "epoch": 3.9972583961617545,
      "grad_norm": 0.1386488462476611,
      "learning_rate": 9.25982148588506e-06,
      "loss": 0.8969,
      "step": 729
    },
    {
      "epoch": 4.0027416038382455,
      "grad_norm": 0.34700978060414556,
      "learning_rate": 9.161873008200816e-06,
      "loss": 1.4657,
      "step": 730
    },
    {
      "epoch": 4.0082248115147365,
      "grad_norm": 0.2465097801021323,
      "learning_rate": 9.064378283404247e-06,
      "loss": 0.8725,
      "step": 731
    },
    {
      "epoch": 4.013708019191227,
      "grad_norm": 0.26497344033852505,
      "learning_rate": 8.967338746034882e-06,
      "loss": 0.8759,
      "step": 732
    },
    {
      "epoch": 4.019191226867718,
      "grad_norm": 0.21838213857553232,
      "learning_rate": 8.870755823934662e-06,
      "loss": 0.8627,
      "step": 733
    },
    {
      "epoch": 4.024674434544209,
      "grad_norm": 0.20277610227352777,
      "learning_rate": 8.774630938226831e-06,
      "loss": 0.886,
      "step": 734
    },
    {
      "epoch": 4.030157642220699,
      "grad_norm": 0.24713492980404156,
      "learning_rate": 8.678965503295114e-06,
      "loss": 0.8692,
      "step": 735
    },
    {
      "epoch": 4.03564084989719,
      "grad_norm": 0.18596227337602494,
      "learning_rate": 8.583760926762852e-06,
      "loss": 0.8702,
      "step": 736
    },
    {
      "epoch": 4.041124057573681,
      "grad_norm": 0.2255598033578029,
      "learning_rate": 8.489018609472297e-06,
      "loss": 0.859,
      "step": 737
    },
    {
      "epoch": 4.046607265250171,
      "grad_norm": 0.19671824377342131,
      "learning_rate": 8.394739945464016e-06,
      "loss": 0.8825,
      "step": 738
    },
    {
      "epoch": 4.052090472926662,
      "grad_norm": 0.19536276470724584,
      "learning_rate": 8.300926321956391e-06,
      "loss": 0.8655,
      "step": 739
    },
    {
      "epoch": 4.057573680603153,
      "grad_norm": 0.20553044507457272,
      "learning_rate": 8.207579119325145e-06,
      "loss": 0.8581,
      "step": 740
    },
    {
      "epoch": 4.063056888279643,
      "grad_norm": 0.1745254415584832,
      "learning_rate": 8.114699711083113e-06,
      "loss": 0.8559,
      "step": 741
    },
    {
      "epoch": 4.068540095956134,
      "grad_norm": 0.19419389496880718,
      "learning_rate": 8.022289463859963e-06,
      "loss": 0.8684,
      "step": 742
    },
    {
      "epoch": 4.074023303632625,
      "grad_norm": 0.16866208377226335,
      "learning_rate": 7.930349737382137e-06,
      "loss": 0.8603,
      "step": 743
    },
    {
      "epoch": 4.079506511309116,
      "grad_norm": 0.17045009966171426,
      "learning_rate": 7.838881884452827e-06,
      "loss": 0.8775,
      "step": 744
    },
    {
      "epoch": 4.084989718985606,
      "grad_norm": 0.1672289163103852,
      "learning_rate": 7.747887250932047e-06,
      "loss": 0.8577,
      "step": 745
    },
    {
      "epoch": 4.090472926662097,
      "grad_norm": 0.16957620944180635,
      "learning_rate": 7.657367175716884e-06,
      "loss": 0.8573,
      "step": 746
    },
    {
      "epoch": 4.095956134338588,
      "grad_norm": 0.15814438359723273,
      "learning_rate": 7.5673229907217146e-06,
      "loss": 0.8519,
      "step": 747
    },
    {
      "epoch": 4.1014393420150785,
      "grad_norm": 0.1554296876299363,
      "learning_rate": 7.477756020858695e-06,
      "loss": 0.8635,
      "step": 748
    },
    {
      "epoch": 4.10692254969157,
      "grad_norm": 0.16052588316393465,
      "learning_rate": 7.38866758401823e-06,
      "loss": 0.8723,
      "step": 749
    },
    {
      "epoch": 4.112405757368061,
      "grad_norm": 0.15330364083531656,
      "learning_rate": 7.300058991049534e-06,
      "loss": 0.8828,
      "step": 750
    },
    {
      "epoch": 4.117888965044551,
      "grad_norm": 0.1495227789255928,
      "learning_rate": 7.211931545741433e-06,
      "loss": 0.8506,
      "step": 751
    },
    {
      "epoch": 4.123372172721042,
      "grad_norm": 0.15408046491976884,
      "learning_rate": 7.124286544803136e-06,
      "loss": 0.8668,
      "step": 752
    },
    {
      "epoch": 4.128855380397533,
      "grad_norm": 0.14881498638247898,
      "learning_rate": 7.037125277845112e-06,
      "loss": 0.8801,
      "step": 753
    },
    {
      "epoch": 4.134338588074023,
      "grad_norm": 0.1464102070171832,
      "learning_rate": 6.950449027360213e-06,
      "loss": 0.8726,
      "step": 754
    },
    {
      "epoch": 4.139821795750514,
      "grad_norm": 0.1453166333634366,
      "learning_rate": 6.864259068704688e-06,
      "loss": 0.8607,
      "step": 755
    },
    {
      "epoch": 4.145305003427005,
      "grad_norm": 0.13625555326942757,
      "learning_rate": 6.778556670079535e-06,
      "loss": 0.8792,
      "step": 756
    },
    {
      "epoch": 4.150788211103496,
      "grad_norm": 0.13807498863680387,
      "learning_rate": 6.69334309251175e-06,
      "loss": 0.8474,
      "step": 757
    },
    {
      "epoch": 4.156271418779986,
      "grad_norm": 0.14308904016946167,
      "learning_rate": 6.608619589835803e-06,
      "loss": 0.8683,
      "step": 758
    },
    {
      "epoch": 4.161754626456477,
      "grad_norm": 0.1440963907671081,
      "learning_rate": 6.524387408675208e-06,
      "loss": 0.8633,
      "step": 759
    },
    {
      "epoch": 4.167237834132968,
      "grad_norm": 0.14254526065135373,
      "learning_rate": 6.440647788424166e-06,
      "loss": 0.8714,
      "step": 760
    },
    {
      "epoch": 4.172721041809458,
      "grad_norm": 0.15400971348680637,
      "learning_rate": 6.357401961229293e-06,
      "loss": 0.8719,
      "step": 761
    },
    {
      "epoch": 4.178204249485949,
      "grad_norm": 0.1334821564613061,
      "learning_rate": 6.274651151971567e-06,
      "loss": 0.8604,
      "step": 762
    },
    {
      "epoch": 4.18368745716244,
      "grad_norm": 0.15458395585147408,
      "learning_rate": 6.1923965782482165e-06,
      "loss": 0.8855,
      "step": 763
    },
    {
      "epoch": 4.18917066483893,
      "grad_norm": 0.14422168418866493,
      "learning_rate": 6.110639450354882e-06,
      "loss": 0.8709,
      "step": 764
    },
    {
      "epoch": 4.194653872515421,
      "grad_norm": 0.13640204014715013,
      "learning_rate": 6.0293809712677775e-06,
      "loss": 0.8779,
      "step": 765
    },
    {
      "epoch": 4.2001370801919125,
      "grad_norm": 0.13728381458929773,
      "learning_rate": 5.9486223366259555e-06,
      "loss": 0.863,
      "step": 766
    },
    {
      "epoch": 4.205620287868403,
      "grad_norm": 1.0957146408038854,
      "learning_rate": 5.868364734713776e-06,
      "loss": 0.8706,
      "step": 767
    },
    {
      "epoch": 4.211103495544894,
      "grad_norm": 0.14870058965626423,
      "learning_rate": 5.788609346443386e-06,
      "loss": 0.8808,
      "step": 768
    },
    {
      "epoch": 4.216586703221385,
      "grad_norm": 0.16703761517821936,
      "learning_rate": 5.70935734533733e-06,
      "loss": 0.852,
      "step": 769
    },
    {
      "epoch": 4.222069910897876,
      "grad_norm": 0.1844114299522098,
      "learning_rate": 5.630609897511328e-06,
      "loss": 0.8613,
      "step": 770
    },
    {
      "epoch": 4.227553118574366,
      "grad_norm": 0.17398006404372696,
      "learning_rate": 5.552368161657082e-06,
      "loss": 0.8556,
      "step": 771
    },
    {
      "epoch": 4.233036326250857,
      "grad_norm": 0.16676512516292483,
      "learning_rate": 5.474633289025244e-06,
      "loss": 0.8425,
      "step": 772
    },
    {
      "epoch": 4.238519533927348,
      "grad_norm": 0.15690331804763263,
      "learning_rate": 5.397406423408446e-06,
      "loss": 0.874,
      "step": 773
    },
    {
      "epoch": 4.244002741603838,
      "grad_norm": 0.15751054116710525,
      "learning_rate": 5.3206887011245165e-06,
      "loss": 0.8871,
      "step": 774
    },
    {
      "epoch": 4.249485949280329,
      "grad_norm": 0.1535880170728185,
      "learning_rate": 5.24448125099974e-06,
      "loss": 0.8681,
      "step": 775
    },
    {
      "epoch": 4.25496915695682,
      "grad_norm": 0.15063251461992208,
      "learning_rate": 5.1687851943522215e-06,
      "loss": 0.8744,
      "step": 776
    },
    {
      "epoch": 4.26045236463331,
      "grad_norm": 0.1520419945091867,
      "learning_rate": 5.093601644975428e-06,
      "loss": 0.8681,
      "step": 777
    },
    {
      "epoch": 4.265935572309801,
      "grad_norm": 0.14153569721366746,
      "learning_rate": 5.018931709121791e-06,
      "loss": 0.8638,
      "step": 778
    },
    {
      "epoch": 4.271418779986292,
      "grad_norm": 0.14202440325296312,
      "learning_rate": 4.9447764854863915e-06,
      "loss": 0.8595,
      "step": 779
    },
    {
      "epoch": 4.276901987662782,
      "grad_norm": 0.13789016773017412,
      "learning_rate": 4.871137065190854e-06,
      "loss": 0.8738,
      "step": 780
    },
    {
      "epoch": 4.282385195339273,
      "grad_norm": 0.13587353525443027,
      "learning_rate": 4.798014531767261e-06,
      "loss": 0.8665,
      "step": 781
    },
    {
      "epoch": 4.287868403015764,
      "grad_norm": 0.13265718560401082,
      "learning_rate": 4.725409961142173e-06,
      "loss": 0.874,
      "step": 782
    },
    {
      "epoch": 4.293351610692255,
      "grad_norm": 0.13693830134507576,
      "learning_rate": 4.653324421620884e-06,
      "loss": 0.8619,
      "step": 783
    },
    {
      "epoch": 4.2988348183687455,
      "grad_norm": 0.13146748198275093,
      "learning_rate": 4.581758973871609e-06,
      "loss": 0.8825,
      "step": 784
    },
    {
      "epoch": 4.3043180260452365,
      "grad_norm": 0.13405773370704815,
      "learning_rate": 4.510714670909946e-06,
      "loss": 0.8536,
      "step": 785
    },
    {
      "epoch": 4.3098012337217275,
      "grad_norm": 0.14574117129879566,
      "learning_rate": 4.440192558083367e-06,
      "loss": 0.8506,
      "step": 786
    },
    {
      "epoch": 4.315284441398218,
      "grad_norm": 0.13101655607771853,
      "learning_rate": 4.370193673055787e-06,
      "loss": 0.8636,
      "step": 787
    },
    {
      "epoch": 4.320767649074709,
      "grad_norm": 0.13191753249596408,
      "learning_rate": 4.300719045792376e-06,
      "loss": 0.8671,
      "step": 788
    },
    {
      "epoch": 4.3262508567512,
      "grad_norm": 0.1310909838553508,
      "learning_rate": 4.231769698544352e-06,
      "loss": 0.8924,
      "step": 789
    },
    {
      "epoch": 4.33173406442769,
      "grad_norm": 0.13210093237615378,
      "learning_rate": 4.163346645833928e-06,
      "loss": 0.8605,
      "step": 790
    },
    {
      "epoch": 4.337217272104181,
      "grad_norm": 0.128792479044049,
      "learning_rate": 4.0954508944394474e-06,
      "loss": 0.8695,
      "step": 791
    },
    {
      "epoch": 4.342700479780672,
      "grad_norm": 0.22455677717587358,
      "learning_rate": 4.028083443380486e-06,
      "loss": 0.8691,
      "step": 792
    },
    {
      "epoch": 4.348183687457162,
      "grad_norm": 0.40527297890394276,
      "learning_rate": 3.961245283903239e-06,
      "loss": 0.8805,
      "step": 793
    },
    {
      "epoch": 4.353666895133653,
      "grad_norm": 0.2888577519432354,
      "learning_rate": 3.89493739946587e-06,
      "loss": 0.8803,
      "step": 794
    },
    {
      "epoch": 4.359150102810144,
      "grad_norm": 0.1350998279076715,
      "learning_rate": 3.829160765724052e-06,
      "loss": 0.8513,
      "step": 795
    },
    {
      "epoch": 4.364633310486635,
      "grad_norm": 0.1309943366405807,
      "learning_rate": 3.7639163505166633e-06,
      "loss": 0.8668,
      "step": 796
    },
    {
      "epoch": 4.370116518163125,
      "grad_norm": 0.12953103571452862,
      "learning_rate": 3.6992051138514717e-06,
      "loss": 0.8712,
      "step": 797
    },
    {
      "epoch": 4.375599725839616,
      "grad_norm": 0.13057599590166327,
      "learning_rate": 3.635028007891048e-06,
      "loss": 0.8686,
      "step": 798
    },
    {
      "epoch": 4.381082933516107,
      "grad_norm": 0.12973353273093458,
      "learning_rate": 3.5713859769387795e-06,
      "loss": 0.8831,
      "step": 799
    },
    {
      "epoch": 4.386566141192597,
      "grad_norm": 0.13131456969773786,
      "learning_rate": 3.5082799574249094e-06,
      "loss": 0.8639,
      "step": 800
    },
    {
      "epoch": 4.392049348869088,
      "grad_norm": 0.12595490195849035,
      "learning_rate": 3.4457108778928272e-06,
      "loss": 0.8631,
      "step": 801
    },
    {
      "epoch": 4.397532556545579,
      "grad_norm": 0.12813268032502836,
      "learning_rate": 3.3836796589853484e-06,
      "loss": 0.8663,
      "step": 802
    },
    {
      "epoch": 4.4030157642220695,
      "grad_norm": 0.12716051374477083,
      "learning_rate": 3.3221872134312184e-06,
      "loss": 0.8525,
      "step": 803
    },
    {
      "epoch": 4.4084989718985605,
      "grad_norm": 0.12371044909708838,
      "learning_rate": 3.261234446031658e-06,
      "loss": 0.8679,
      "step": 804
    },
    {
      "epoch": 4.413982179575052,
      "grad_norm": 0.12478585102448334,
      "learning_rate": 3.200822253647031e-06,
      "loss": 0.8889,
      "step": 805
    },
    {
      "epoch": 4.419465387251542,
      "grad_norm": 0.1293948457179513,
      "learning_rate": 3.140951525183691e-06,
      "loss": 0.8629,
      "step": 806
    },
    {
      "epoch": 4.424948594928033,
      "grad_norm": 0.127763766689417,
      "learning_rate": 3.0816231415808785e-06,
      "loss": 0.8545,
      "step": 807
    },
    {
      "epoch": 4.430431802604524,
      "grad_norm": 0.12507739335573223,
      "learning_rate": 3.02283797579773e-06,
      "loss": 0.8719,
      "step": 808
    },
    {
      "epoch": 4.435915010281015,
      "grad_norm": 0.12408679274948532,
      "learning_rate": 2.9645968928005085e-06,
      "loss": 0.8643,
      "step": 809
    },
    {
      "epoch": 4.441398217957505,
      "grad_norm": 0.14843016184470606,
      "learning_rate": 2.906900749549784e-06,
      "loss": 0.8993,
      "step": 810
    },
    {
      "epoch": 4.446881425633996,
      "grad_norm": 0.12190268058344725,
      "learning_rate": 2.849750394987907e-06,
      "loss": 0.8632,
      "step": 811
    },
    {
      "epoch": 4.452364633310487,
      "grad_norm": 0.12079449899424143,
      "learning_rate": 2.793146670026472e-06,
      "loss": 0.8792,
      "step": 812
    },
    {
      "epoch": 4.457847840986977,
      "grad_norm": 0.12301325559585843,
      "learning_rate": 2.737090407533938e-06,
      "loss": 0.8561,
      "step": 813
    },
    {
      "epoch": 4.463331048663468,
      "grad_norm": 0.12435308059508353,
      "learning_rate": 2.681582432323406e-06,
      "loss": 0.867,
      "step": 814
    },
    {
      "epoch": 4.468814256339959,
      "grad_norm": 0.12288901600568765,
      "learning_rate": 2.6266235611404645e-06,
      "loss": 0.8742,
      "step": 815
    },
    {
      "epoch": 4.474297464016449,
      "grad_norm": 0.12223916369585316,
      "learning_rate": 2.5722146026511574e-06,
      "loss": 0.8566,
      "step": 816
    },
    {
      "epoch": 4.47978067169294,
      "grad_norm": 0.12065119171288469,
      "learning_rate": 2.5183563574301185e-06,
      "loss": 0.869,
      "step": 817
    },
    {
      "epoch": 4.485263879369431,
      "grad_norm": 0.12212414616793427,
      "learning_rate": 2.465049617948778e-06,
      "loss": 0.8776,
      "step": 818
    },
    {
      "epoch": 4.490747087045921,
      "grad_norm": 0.12172913149868783,
      "learning_rate": 2.4122951685636674e-06,
      "loss": 0.8778,
      "step": 819
    },
    {
      "epoch": 4.496230294722412,
      "grad_norm": 0.12399214672845997,
      "learning_rate": 2.3600937855049467e-06,
      "loss": 0.872,
      "step": 820
    },
    {
      "epoch": 4.501713502398903,
      "grad_norm": 0.1209018345998114,
      "learning_rate": 2.308446236864916e-06,
      "loss": 0.8586,
      "step": 821
    },
    {
      "epoch": 4.5071967100753945,
      "grad_norm": 0.1177513204148733,
      "learning_rate": 2.257353282586774e-06,
      "loss": 0.8404,
      "step": 822
    },
    {
      "epoch": 4.512679917751885,
      "grad_norm": 0.12365836386703247,
      "learning_rate": 2.206815674453373e-06,
      "loss": 0.876,
      "step": 823
    },
    {
      "epoch": 4.518163125428376,
      "grad_norm": 0.12060944467881936,
      "learning_rate": 2.1568341560762152e-06,
      "loss": 0.8756,
      "step": 824
    },
    {
      "epoch": 4.523646333104867,
      "grad_norm": 0.1205949737711182,
      "learning_rate": 2.1074094628844754e-06,
      "loss": 0.8777,
      "step": 825
    },
    {
      "epoch": 4.529129540781357,
      "grad_norm": 0.12079575476686041,
      "learning_rate": 2.0585423221141807e-06,
      "loss": 0.8736,
      "step": 826
    },
    {
      "epoch": 4.534612748457848,
      "grad_norm": 0.1218689347455744,
      "learning_rate": 2.010233452797534e-06,
      "loss": 0.8667,
      "step": 827
    },
    {
      "epoch": 4.540095956134339,
      "grad_norm": 0.12283214110808495,
      "learning_rate": 1.9624835657523222e-06,
      "loss": 0.8666,
      "step": 828
    },
    {
      "epoch": 4.54557916381083,
      "grad_norm": 0.11913304460847574,
      "learning_rate": 1.9152933635714354e-06,
      "loss": 0.8667,
      "step": 829
    },
    {
      "epoch": 4.55106237148732,
      "grad_norm": 0.12377547685145791,
      "learning_rate": 1.8686635406125697e-06,
      "loss": 0.8898,
      "step": 830
    },
    {
      "epoch": 4.556545579163811,
      "grad_norm": 0.11633012806752614,
      "learning_rate": 1.822594782987972e-06,
      "loss": 0.8411,
      "step": 831
    },
    {
      "epoch": 4.562028786840301,
      "grad_norm": 0.11864592533450119,
      "learning_rate": 1.7770877685543687e-06,
      "loss": 0.8679,
      "step": 832
    },
    {
      "epoch": 4.567511994516792,
      "grad_norm": 0.12190861480595079,
      "learning_rate": 1.7321431669029953e-06,
      "loss": 0.8708,
      "step": 833
    },
    {
      "epoch": 4.572995202193283,
      "grad_norm": 0.12195430153778734,
      "learning_rate": 1.6877616393497075e-06,
      "loss": 0.8762,
      "step": 834
    },
    {
      "epoch": 4.578478409869774,
      "grad_norm": 0.12318104357609574,
      "learning_rate": 1.6439438389252948e-06,
      "loss": 0.8817,
      "step": 835
    },
    {
      "epoch": 4.583961617546264,
      "grad_norm": 0.1252397305163344,
      "learning_rate": 1.6006904103658572e-06,
      "loss": 0.8713,
      "step": 836
    },
    {
      "epoch": 4.589444825222755,
      "grad_norm": 0.11863485627738708,
      "learning_rate": 1.5580019901032929e-06,
      "loss": 0.8585,
      "step": 837
    },
    {
      "epoch": 4.594928032899246,
      "grad_norm": 0.11926883785074256,
      "learning_rate": 1.5158792062559813e-06,
      "loss": 0.8553,
      "step": 838
    },
    {
      "epoch": 4.6004112405757365,
      "grad_norm": 0.12010806317107317,
      "learning_rate": 1.4743226786194931e-06,
      "loss": 0.8632,
      "step": 839
    },
    {
      "epoch": 4.6058944482522275,
      "grad_norm": 0.1209497918934812,
      "learning_rate": 1.4333330186575079e-06,
      "loss": 0.8661,
      "step": 840
    },
    {
      "epoch": 4.6113776559287185,
      "grad_norm": 0.12015700387003853,
      "learning_rate": 1.3929108294927951e-06,
      "loss": 0.8672,
      "step": 841
    },
    {
      "epoch": 4.6168608636052095,
      "grad_norm": 0.12422998319235647,
      "learning_rate": 1.3530567058983369e-06,
      "loss": 0.8732,
      "step": 842
    },
    {
      "epoch": 4.6223440712817,
      "grad_norm": 0.11888832603251932,
      "learning_rate": 1.31377123428861e-06,
      "loss": 0.8723,
      "step": 843
    },
    {
      "epoch": 4.627827278958191,
      "grad_norm": 0.11996366586250991,
      "learning_rate": 1.2750549927109136e-06,
      "loss": 0.8574,
      "step": 844
    },
    {
      "epoch": 4.633310486634681,
      "grad_norm": 0.11745160261296496,
      "learning_rate": 1.2369085508368862e-06,
      "loss": 0.8604,
      "step": 845
    },
    {
      "epoch": 4.638793694311172,
      "grad_norm": 0.12200784232729492,
      "learning_rate": 1.1993324699541265e-06,
      "loss": 0.8517,
      "step": 846
    },
    {
      "epoch": 4.644276901987663,
      "grad_norm": 0.11775847148513344,
      "learning_rate": 1.1623273029579195e-06,
      "loss": 0.8695,
      "step": 847
    },
    {
      "epoch": 4.649760109664154,
      "grad_norm": 0.11881330238355213,
      "learning_rate": 1.1258935943431237e-06,
      "loss": 0.8667,
      "step": 848
    },
    {
      "epoch": 4.655243317340644,
      "grad_norm": 0.11930205382890313,
      "learning_rate": 1.090031880196145e-06,
      "loss": 0.8669,
      "step": 849
    },
    {
      "epoch": 4.660726525017135,
      "grad_norm": 0.12062456998925507,
      "learning_rate": 1.0547426881870292e-06,
      "loss": 0.8706,
      "step": 850
    },
    {
      "epoch": 4.666209732693626,
      "grad_norm": 0.119609080366811,
      "learning_rate": 1.0200265375617514e-06,
      "loss": 0.8735,
      "step": 851
    },
    {
      "epoch": 4.671692940370116,
      "grad_norm": 0.11883822127861333,
      "learning_rate": 9.858839391345065e-07,
      "loss": 0.8857,
      "step": 852
    },
    {
      "epoch": 4.677176148046607,
      "grad_norm": 0.11869096130082298,
      "learning_rate": 9.523153952802633e-07,
      "loss": 0.8567,
      "step": 853
    },
    {
      "epoch": 4.682659355723098,
      "grad_norm": 0.11660180847816304,
      "learning_rate": 9.193213999273199e-07,
      "loss": 0.8732,
      "step": 854
    },
    {
      "epoch": 4.688142563399589,
      "grad_norm": 0.1156650459781897,
      "learning_rate": 8.869024385500524e-07,
      "loss": 0.8694,
      "step": 855
    },
    {
      "epoch": 4.693625771076079,
      "grad_norm": 0.11849635720653122,
      "learning_rate": 8.550589881617877e-07,
      "loss": 0.8666,
      "step": 856
    },
    {
      "epoch": 4.69910897875257,
      "grad_norm": 0.12360260427554048,
      "learning_rate": 8.237915173077681e-07,
      "loss": 0.8503,
      "step": 857
    },
    {
      "epoch": 4.7045921864290605,
      "grad_norm": 0.11834338075827068,
      "learning_rate": 7.93100486058247e-07,
      "loss": 0.8824,
      "step": 858
    },
    {
      "epoch": 4.7100753941055515,
      "grad_norm": 0.12094889766224019,
      "learning_rate": 7.629863460017506e-07,
      "loss": 0.8686,
      "step": 859
    },
    {
      "epoch": 4.715558601782043,
      "grad_norm": 0.12037069037280847,
      "learning_rate": 7.334495402383957e-07,
      "loss": 0.8668,
      "step": 860
    },
    {
      "epoch": 4.721041809458534,
      "grad_norm": 0.1165471688624166,
      "learning_rate": 7.044905033734096e-07,
      "loss": 0.878,
      "step": 861
    },
    {
      "epoch": 4.726525017135024,
      "grad_norm": 0.11868226704814998,
      "learning_rate": 6.761096615107043e-07,
      "loss": 0.8786,
      "step": 862
    },
    {
      "epoch": 4.732008224811515,
      "grad_norm": 0.11761938779683033,
      "learning_rate": 6.483074322466154e-07,
      "loss": 0.8801,
      "step": 863
    },
    {
      "epoch": 4.737491432488006,
      "grad_norm": 0.119444219753523,
      "learning_rate": 6.210842246637683e-07,
      "loss": 0.8734,
      "step": 864
    },
    {
      "epoch": 4.742974640164496,
      "grad_norm": 0.11552293652107705,
      "learning_rate": 5.944404393250481e-07,
      "loss": 0.8443,
      "step": 865
    },
    {
      "epoch": 4.748457847840987,
      "grad_norm": 0.11624067828548895,
      "learning_rate": 5.683764682677018e-07,
      "loss": 0.8826,
      "step": 866
    },
    {
      "epoch": 4.753941055517478,
      "grad_norm": 0.11747660823290842,
      "learning_rate": 5.428926949975788e-07,
      "loss": 0.871,
      "step": 867
    },
    {
      "epoch": 4.759424263193969,
      "grad_norm": 0.11800804895284038,
      "learning_rate": 5.179894944834863e-07,
      "loss": 0.8808,
      "step": 868
    },
    {
      "epoch": 4.764907470870459,
      "grad_norm": 0.11426381832520421,
      "learning_rate": 4.936672331516778e-07,
      "loss": 0.865,
      "step": 869
    },
    {
      "epoch": 4.77039067854695,
      "grad_norm": 0.11934453977172532,
      "learning_rate": 4.699262688804451e-07,
      "loss": 0.8476,
      "step": 870
    },
    {
      "epoch": 4.77587388622344,
      "grad_norm": 0.11932772884879854,
      "learning_rate": 4.467669509948591e-07,
      "loss": 0.8644,
      "step": 871
    },
    {
      "epoch": 4.781357093899931,
      "grad_norm": 0.11533502002121204,
      "learning_rate": 4.241896202616502e-07,
      "loss": 0.8659,
      "step": 872
    },
    {
      "epoch": 4.786840301576422,
      "grad_norm": 0.11707563398942186,
      "learning_rate": 4.0219460888415884e-07,
      "loss": 0.8687,
      "step": 873
    },
    {
      "epoch": 4.792323509252913,
      "grad_norm": 0.11572299884307491,
      "learning_rate": 3.807822404974726e-07,
      "loss": 0.8619,
      "step": 874
    },
    {
      "epoch": 4.797806716929403,
      "grad_norm": 0.11414580388031612,
      "learning_rate": 3.599528301636612e-07,
      "loss": 0.8773,
      "step": 875
    },
    {
      "epoch": 4.803289924605894,
      "grad_norm": 0.1139811437838818,
      "learning_rate": 3.397066843671315e-07,
      "loss": 0.8614,
      "step": 876
    },
    {
      "epoch": 4.8087731322823855,
      "grad_norm": 0.11506534996205621,
      "learning_rate": 3.200441010101196e-07,
      "loss": 0.851,
      "step": 877
    },
    {
      "epoch": 4.814256339958876,
      "grad_norm": 0.11489034470022166,
      "learning_rate": 3.0096536940832145e-07,
      "loss": 0.8847,
      "step": 878
    },
    {
      "epoch": 4.819739547635367,
      "grad_norm": 0.11675777776548621,
      "learning_rate": 2.824707702866114e-07,
      "loss": 0.8569,
      "step": 879
    },
    {
      "epoch": 4.825222755311858,
      "grad_norm": 0.11463623776794303,
      "learning_rate": 2.645605757749392e-07,
      "loss": 0.8607,
      "step": 880
    },
    {
      "epoch": 4.830705962988349,
      "grad_norm": 0.11596024436355572,
      "learning_rate": 2.4723504940430187e-07,
      "loss": 0.8815,
      "step": 881
    },
    {
      "epoch": 4.836189170664839,
      "grad_norm": 0.11832403238695531,
      "learning_rate": 2.3049444610288462e-07,
      "loss": 0.8729,
      "step": 882
    },
    {
      "epoch": 4.84167237834133,
      "grad_norm": 0.1163599327714919,
      "learning_rate": 2.1433901219229502e-07,
      "loss": 0.8607,
      "step": 883
    },
    {
      "epoch": 4.84715558601782,
      "grad_norm": 0.11506741204644842,
      "learning_rate": 1.9876898538394362e-07,
      "loss": 0.875,
      "step": 884
    },
    {
      "epoch": 4.852638793694311,
      "grad_norm": 0.11571697024128855,
      "learning_rate": 1.8378459477555788e-07,
      "loss": 0.876,
      "step": 885
    },
    {
      "epoch": 4.858122001370802,
      "grad_norm": 0.1233092898569507,
      "learning_rate": 1.6938606084779375e-07,
      "loss": 0.8612,
      "step": 886
    },
    {
      "epoch": 4.863605209047293,
      "grad_norm": 0.11387876698076485,
      "learning_rate": 1.555735954610027e-07,
      "loss": 0.8499,
      "step": 887
    },
    {
      "epoch": 4.869088416723783,
      "grad_norm": 0.11498145411665506,
      "learning_rate": 1.4234740185210095e-07,
      "loss": 0.859,
      "step": 888
    },
    {
      "epoch": 4.874571624400274,
      "grad_norm": 0.11650609203788179,
      "learning_rate": 1.2970767463160284e-07,
      "loss": 0.8602,
      "step": 889
    },
    {
      "epoch": 4.880054832076765,
      "grad_norm": 0.11664236567167152,
      "learning_rate": 1.176545997807299e-07,
      "loss": 0.8746,
      "step": 890
    },
    {
      "epoch": 4.885538039753255,
      "grad_norm": 0.11702468705401428,
      "learning_rate": 1.0618835464870191e-07,
      "loss": 0.8525,
      "step": 891
    },
    {
      "epoch": 4.891021247429746,
      "grad_norm": 0.11451498374169765,
      "learning_rate": 9.530910795009895e-08,
      "loss": 0.8599,
      "step": 892
    },
    {
      "epoch": 4.896504455106237,
      "grad_norm": 0.11475845584891285,
      "learning_rate": 8.501701976239673e-08,
      "loss": 0.8583,
      "step": 893
    },
    {
      "epoch": 4.901987662782728,
      "grad_norm": 0.11547791831787842,
      "learning_rate": 7.531224152362183e-08,
      "loss": 0.8655,
      "step": 894
    },
    {
      "epoch": 4.9074708704592185,
      "grad_norm": 0.11917262127195702,
      "learning_rate": 6.619491603008676e-08,
      "loss": 0.8724,
      "step": 895
    },
    {
      "epoch": 4.9129540781357095,
      "grad_norm": 0.11681023683693044,
      "learning_rate": 5.766517743432953e-08,
      "loss": 0.8638,
      "step": 896
    },
    {
      "epoch": 4.9184372858122005,
      "grad_norm": 0.11279645464293689,
      "learning_rate": 4.9723151243106225e-08,
      "loss": 0.8528,
      "step": 897
    },
    {
      "epoch": 4.923920493488691,
      "grad_norm": 0.1138256632587849,
      "learning_rate": 4.236895431557031e-08,
      "loss": 0.8572,
      "step": 898
    },
    {
      "epoch": 4.929403701165182,
      "grad_norm": 0.11375846263511914,
      "learning_rate": 3.560269486154066e-08,
      "loss": 0.8391,
      "step": 899
    },
    {
      "epoch": 4.934886908841673,
      "grad_norm": 0.11717016335228325,
      "learning_rate": 2.9424472439911757e-08,
      "loss": 0.8752,
      "step": 900
    },
    {
      "epoch": 4.940370116518163,
      "grad_norm": 0.11269596219934125,
      "learning_rate": 2.3834377957183684e-08,
      "loss": 0.8491,
      "step": 901
    },
    {
      "epoch": 4.945853324194654,
      "grad_norm": 0.1158772425437692,
      "learning_rate": 1.8832493666125494e-08,
      "loss": 0.872,
      "step": 902
    },
    {
      "epoch": 4.951336531871145,
      "grad_norm": 0.9980363287282,
      "learning_rate": 1.4418893164585002e-08,
      "loss": 0.9242,
      "step": 903
    },
    {
      "epoch": 4.956819739547635,
      "grad_norm": 0.11610912640066366,
      "learning_rate": 1.0593641394369691e-08,
      "loss": 0.8648,
      "step": 904
    },
    {
      "epoch": 4.962302947224126,
      "grad_norm": 0.11625433625052747,
      "learning_rate": 7.356794640318576e-09,
      "loss": 0.857,
      "step": 905
    },
    {
      "epoch": 4.967786154900617,
      "grad_norm": 0.11721935619429466,
      "learning_rate": 4.708400529476187e-09,
      "loss": 0.85,
      "step": 906
    },
    {
      "epoch": 4.973269362577108,
      "grad_norm": 0.11503586503967776,
      "learning_rate": 2.648498030364266e-09,
      "loss": 0.8709,
      "step": 907
    },
    {
      "epoch": 4.978752570253598,
      "grad_norm": 0.11671324103566125,
      "learning_rate": 1.1771174524355388e-09,
      "loss": 0.8843,
      "step": 908
    },
    {
      "epoch": 4.984235777930089,
      "grad_norm": 0.11519765971232111,
      "learning_rate": 2.9428044562074265e-10,
      "loss": 0.862,
      "step": 909
    },
    {
      "epoch": 4.98971898560658,
      "grad_norm": 0.11510258089520117,
      "learning_rate": 0.0,
      "loss": 0.8606,
      "step": 910
    },
    {
      "epoch": 4.98971898560658,
      "step": 910,
      "total_flos": 2.1065883137322516e+19,
      "train_loss": 1.0237522615836216,
      "train_runtime": 109255.8386,
      "train_samples_per_second": 4.272,
      "train_steps_per_second": 0.008
    }
  ],
"logging_steps": 1.0, |
|
"max_steps": 910, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.1065883137322516e+19, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|