{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 30.0,
  "global_step": 1410,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "learning_rate": 1.4184397163120568e-07, "loss": 38.9356, "step": 1 },
    { "epoch": 0.04, "learning_rate": 2.8368794326241136e-07, "loss": 40.4654, "step": 2 },
    { "epoch": 0.06, "learning_rate": 4.2553191489361704e-07, "loss": 44.0169, "step": 3 },
    { "epoch": 0.09, "learning_rate": 5.673758865248227e-07, "loss": 46.5705, "step": 4 },
    { "epoch": 0.11, "learning_rate": 7.092198581560285e-07, "loss": 41.8988, "step": 5 },
    { "epoch": 0.13, "learning_rate": 8.510638297872341e-07, "loss": 39.3279, "step": 6 },
    { "epoch": 0.15, "learning_rate": 9.929078014184399e-07, "loss": 38.168, "step": 7 },
    { "epoch": 0.17, "learning_rate": 1.1347517730496454e-06, "loss": 36.2625, "step": 8 },
    { "epoch": 0.19, "learning_rate": 1.276595744680851e-06, "loss": 45.8777, "step": 9 },
    { "epoch": 0.21, "learning_rate": 1.418439716312057e-06, "loss": 38.6234, "step": 10 },
    { "epoch": 0.23, "learning_rate": 1.5602836879432626e-06, "loss": 42.2255, "step": 11 },
    { "epoch": 0.26, "learning_rate": 1.7021276595744682e-06, "loss": 37.4196, "step": 12 },
    { "epoch": 0.28, "learning_rate": 1.8439716312056737e-06, "loss": 33.3796, "step": 13 },
    { "epoch": 0.3, "learning_rate": 1.9858156028368797e-06, "loss": 37.4033, "step": 14 },
    { "epoch": 0.32, "learning_rate": 2.1276595744680853e-06, "loss": 37.6539, "step": 15 },
    { "epoch": 0.34, "learning_rate": 2.269503546099291e-06, "loss": 41.804, "step": 16 },
    { "epoch": 0.36, "learning_rate": 2.4113475177304965e-06, "loss": 32.3455, "step": 17 },
    { "epoch": 0.38, "learning_rate": 2.553191489361702e-06, "loss": 38.5504, "step": 18 },
    { "epoch": 0.4, "learning_rate": 2.695035460992908e-06, "loss": 43.7085, "step": 19 },
    { "epoch": 0.43, "learning_rate": 2.836879432624114e-06, "loss": 37.1537, "step": 20 },
    { "epoch": 0.45, "learning_rate": 2.978723404255319e-06, "loss": 35.6007, "step": 21 },
    { "epoch": 0.47, "learning_rate": 3.120567375886525e-06, "loss": 39.1722, "step": 22 },
    { "epoch": 0.49, "learning_rate": 3.262411347517731e-06, "loss": 37.4648, "step": 23 },
    { "epoch": 0.51, "learning_rate": 3.4042553191489363e-06, "loss": 30.8264, "step": 24 },
    { "epoch": 0.53, "learning_rate": 3.5460992907801423e-06, "loss": 34.4226, "step": 25 },
    { "epoch": 0.55, "learning_rate": 3.6879432624113475e-06, "loss": 42.0648, "step": 26 },
    { "epoch": 0.57, "learning_rate": 3.8297872340425535e-06, "loss": 37.8212, "step": 27 },
    { "epoch": 0.6, "learning_rate": 3.9716312056737595e-06, "loss": 40.5726, "step": 28 },
    { "epoch": 0.62, "learning_rate": 4.113475177304965e-06, "loss": 36.1741, "step": 29 },
    { "epoch": 0.64, "learning_rate": 4.255319148936171e-06, "loss": 32.9572, "step": 30 },
    { "epoch": 0.66, "learning_rate": 4.397163120567377e-06, "loss": 32.4154, "step": 31 },
    { "epoch": 0.68, "learning_rate": 4.539007092198582e-06, "loss": 30.6888, "step": 32 },
    { "epoch": 0.7, "learning_rate": 4.680851063829788e-06, "loss": 27.9387, "step": 33 },
    { "epoch": 0.72, "learning_rate": 4.822695035460993e-06, "loss": 35.6382, "step": 34 },
    { "epoch": 0.74, "learning_rate": 4.964539007092199e-06, "loss": 29.7059, "step": 35 },
    { "epoch": 0.77, "learning_rate": 5.106382978723404e-06, "loss": 30.219, "step": 36 },
    { "epoch": 0.79, "learning_rate": 5.24822695035461e-06, "loss": 26.2963, "step": 37 },
    { "epoch": 0.81, "learning_rate": 5.390070921985816e-06, "loss": 28.7659, "step": 38 },
    { "epoch": 0.83, "learning_rate": 5.531914893617022e-06, "loss": 29.2307, "step": 39 },
    { "epoch": 0.85, "learning_rate": 5.673758865248228e-06, "loss": 27.3642, "step": 40 },
    { "epoch": 0.87, "learning_rate": 5.815602836879432e-06, "loss": 24.7804, "step": 41 },
    { "epoch": 0.89, "learning_rate": 5.957446808510638e-06, "loss": 26.2994, "step": 42 },
    { "epoch": 0.91, "learning_rate": 6.099290780141844e-06, "loss": 23.4272, "step": 43 },
    { "epoch": 0.94, "learning_rate": 6.24113475177305e-06, "loss": 24.4856, "step": 44 },
    { "epoch": 0.96, "learning_rate": 6.382978723404256e-06, "loss": 23.7573, "step": 45 },
    { "epoch": 0.98, "learning_rate": 6.524822695035462e-06, "loss": 27.808, "step": 46 },
    { "epoch": 1.0, "learning_rate": 6.666666666666667e-06, "loss": 23.3867, "step": 47 },
    { "epoch": 1.02, "learning_rate": 6.808510638297873e-06, "loss": 24.1434, "step": 48 },
    { "epoch": 1.04, "learning_rate": 6.950354609929079e-06, "loss": 22.114, "step": 49 },
    { "epoch": 1.06, "learning_rate": 7.092198581560285e-06, "loss": 24.6335, "step": 50 },
    { "epoch": 1.09, "learning_rate": 7.234042553191491e-06, "loss": 23.305, "step": 51 },
    { "epoch": 1.11, "learning_rate": 7.375886524822695e-06, "loss": 19.9513, "step": 52 },
    { "epoch": 1.13, "learning_rate": 7.517730496453901e-06, "loss": 23.5701, "step": 53 },
    { "epoch": 1.15, "learning_rate": 7.659574468085107e-06, "loss": 23.3574, "step": 54 },
    { "epoch": 1.17, "learning_rate": 7.801418439716313e-06, "loss": 22.8893, "step": 55 },
    { "epoch": 1.19, "learning_rate": 7.943262411347519e-06, "loss": 22.3607, "step": 56 },
    { "epoch": 1.21, "learning_rate": 8.085106382978723e-06, "loss": 22.5495, "step": 57 },
    { "epoch": 1.23, "learning_rate": 8.22695035460993e-06, "loss": 19.1209, "step": 58 },
    { "epoch": 1.26, "learning_rate": 8.368794326241135e-06, "loss": 19.6811, "step": 59 },
    { "epoch": 1.28, "learning_rate": 8.510638297872341e-06, "loss": 16.6116, "step": 60 },
    { "epoch": 1.3, "learning_rate": 8.652482269503547e-06, "loss": 19.5871, "step": 61 },
    { "epoch": 1.32, "learning_rate": 8.794326241134753e-06, "loss": 17.8217, "step": 62 },
    { "epoch": 1.34, "learning_rate": 8.936170212765958e-06, "loss": 15.528, "step": 63 },
    { "epoch": 1.36, "learning_rate": 9.078014184397164e-06, "loss": 16.2454, "step": 64 },
    { "epoch": 1.38, "learning_rate": 9.21985815602837e-06, "loss": 18.046, "step": 65 },
    { "epoch": 1.4, "learning_rate": 9.361702127659576e-06, "loss": 14.2817, "step": 66 },
    { "epoch": 1.43, "learning_rate": 9.503546099290782e-06, "loss": 16.3816, "step": 67 },
    { "epoch": 1.45, "learning_rate": 9.645390070921986e-06, "loss": 16.0658, "step": 68 },
    { "epoch": 1.47, "learning_rate": 9.787234042553192e-06, "loss": 16.0036, "step": 69 },
    { "epoch": 1.49, "learning_rate": 9.929078014184398e-06, "loss": 12.9345, "step": 70 },
    { "epoch": 1.51, "learning_rate": 1.0070921985815602e-05, "loss": 14.7829, "step": 71 },
    { "epoch": 1.53, "learning_rate": 1.0212765957446808e-05, "loss": 14.0603, "step": 72 },
    { "epoch": 1.55, "learning_rate": 1.0354609929078014e-05, "loss": 15.4779, "step": 73 },
    { "epoch": 1.57, "learning_rate": 1.049645390070922e-05, "loss": 13.145, "step": 74 },
    { "epoch": 1.6, "learning_rate": 1.0638297872340426e-05, "loss": 15.1291, "step": 75 },
    { "epoch": 1.62, "learning_rate": 1.0780141843971632e-05, "loss": 13.6331, "step": 76 },
    { "epoch": 1.64, "learning_rate": 1.0921985815602838e-05, "loss": 13.8599, "step": 77 },
    { "epoch": 1.66, "learning_rate": 1.1063829787234044e-05, "loss": 14.0126, "step": 78 },
    { "epoch": 1.68, "learning_rate": 1.120567375886525e-05, "loss": 12.7355, "step": 79 },
    { "epoch": 1.7, "learning_rate": 1.1347517730496456e-05, "loss": 11.5927, "step": 80 },
    { "epoch": 1.72, "learning_rate": 1.1489361702127662e-05, "loss": 12.736, "step": 81 },
    { "epoch": 1.74, "learning_rate": 1.1631205673758865e-05, "loss": 12.5622, "step": 82 },
    { "epoch": 1.77, "learning_rate": 1.177304964539007e-05, "loss": 11.792, "step": 83 },
    { "epoch": 1.79, "learning_rate": 1.1914893617021277e-05, "loss": 10.3158, "step": 84 },
    { "epoch": 1.81, "learning_rate": 1.2056737588652483e-05, "loss": 11.1876, "step": 85 },
    { "epoch": 1.83, "learning_rate": 1.2198581560283689e-05, "loss": 9.8064, "step": 86 },
    { "epoch": 1.85, "learning_rate": 1.2340425531914895e-05, "loss": 9.0112, "step": 87 },
    { "epoch": 1.87, "learning_rate": 1.24822695035461e-05, "loss": 8.9206, "step": 88 },
    { "epoch": 1.89, "learning_rate": 1.2624113475177307e-05, "loss": 10.2341, "step": 89 },
    { "epoch": 1.91, "learning_rate": 1.2765957446808513e-05, "loss": 9.4922, "step": 90 },
    { "epoch": 1.94, "learning_rate": 1.2907801418439719e-05, "loss": 9.5027, "step": 91 },
    { "epoch": 1.96, "learning_rate": 1.3049645390070925e-05, "loss": 8.4176, "step": 92 },
    { "epoch": 1.98, "learning_rate": 1.3191489361702127e-05, "loss": 8.8949, "step": 93 },
    { "epoch": 2.0, "learning_rate": 1.3333333333333333e-05, "loss": 6.9912, "step": 94 },
    { "epoch": 2.02, "learning_rate": 1.347517730496454e-05, "loss": 7.6754, "step": 95 },
    { "epoch": 2.04, "learning_rate": 1.3617021276595745e-05, "loss": 8.0876, "step": 96 },
    { "epoch": 2.06, "learning_rate": 1.3758865248226951e-05, "loss": 7.21, "step": 97 },
    { "epoch": 2.09, "learning_rate": 1.3900709219858157e-05, "loss": 7.212, "step": 98 },
    { "epoch": 2.11, "learning_rate": 1.4042553191489363e-05, "loss": 6.8503, "step": 99 },
    { "epoch": 2.13, "learning_rate": 1.418439716312057e-05, "loss": 6.0915, "step": 100 },
    { "epoch": 2.15, "learning_rate": 1.4326241134751775e-05, "loss": 6.0877, "step": 101 },
    { "epoch": 2.17, "learning_rate": 1.4468085106382981e-05, "loss": 5.0609, "step": 102 },
    { "epoch": 2.19, "learning_rate": 1.4609929078014187e-05, "loss": 4.9673, "step": 103 },
    { "epoch": 2.21, "learning_rate": 1.475177304964539e-05, "loss": 5.0921, "step": 104 },
    { "epoch": 2.23, "learning_rate": 1.4893617021276596e-05, "loss": 4.8247, "step": 105 },
    { "epoch": 2.26, "learning_rate": 1.5035460992907802e-05, "loss": 4.4145, "step": 106 },
    { "epoch": 2.28, "learning_rate": 1.5177304964539008e-05, "loss": 4.5116, "step": 107 },
    { "epoch": 2.3, "learning_rate": 1.5319148936170214e-05, "loss": 3.9883, "step": 108 },
    { "epoch": 2.32, "learning_rate": 1.546099290780142e-05, "loss": 3.7368, "step": 109 },
    { "epoch": 2.34, "learning_rate": 1.5602836879432626e-05, "loss": 3.4411, "step": 110 },
    { "epoch": 2.36, "learning_rate": 1.5744680851063832e-05, "loss": 3.656, "step": 111 },
    { "epoch": 2.38, "learning_rate": 1.5886524822695038e-05, "loss": 3.382, "step": 112 },
    { "epoch": 2.4, "learning_rate": 1.6028368794326244e-05, "loss": 3.2706, "step": 113 },
    { "epoch": 2.43, "learning_rate": 1.6170212765957446e-05, "loss": 3.1071, "step": 114 },
    { "epoch": 2.45, "learning_rate": 1.6312056737588652e-05, "loss": 2.9376, "step": 115 },
    { "epoch": 2.47, "learning_rate": 1.645390070921986e-05, "loss": 2.9987, "step": 116 },
    { "epoch": 2.49, "learning_rate": 1.6595744680851064e-05, "loss": 3.0048, "step": 117 },
    { "epoch": 2.51, "learning_rate": 1.673758865248227e-05, "loss": 2.8357, "step": 118 },
    { "epoch": 2.53, "learning_rate": 1.6879432624113476e-05, "loss": 2.7017, "step": 119 },
    { "epoch": 2.55, "learning_rate": 1.7021276595744682e-05, "loss": 2.6277, "step": 120 },
    { "epoch": 2.57, "learning_rate": 1.716312056737589e-05, "loss": 2.4932, "step": 121 },
    { "epoch": 2.6, "learning_rate": 1.7304964539007094e-05, "loss": 2.4817, "step": 122 },
    { "epoch": 2.62, "learning_rate": 1.74468085106383e-05, "loss": 2.4608, "step": 123 },
    { "epoch": 2.64, "learning_rate": 1.7588652482269506e-05, "loss": 2.3458, "step": 124 },
    { "epoch": 2.66, "learning_rate": 1.773049645390071e-05, "loss": 2.3615, "step": 125 },
    { "epoch": 2.68, "learning_rate": 1.7872340425531915e-05, "loss": 2.3322, "step": 126 },
    { "epoch": 2.7, "learning_rate": 1.801418439716312e-05, "loss": 2.4402, "step": 127 },
    { "epoch": 2.72, "learning_rate": 1.8156028368794327e-05, "loss": 2.2707, "step": 128 },
    { "epoch": 2.74, "learning_rate": 1.8297872340425533e-05, "loss": 2.1061, "step": 129 },
    { "epoch": 2.77, "learning_rate": 1.843971631205674e-05, "loss": 2.0111, "step": 130 },
    { "epoch": 2.79, "learning_rate": 1.8581560283687945e-05, "loss": 2.0149, "step": 131 },
    { "epoch": 2.81, "learning_rate": 1.872340425531915e-05, "loss": 2.0791, "step": 132 },
    { "epoch": 2.83, "learning_rate": 1.8865248226950357e-05, "loss": 1.7946, "step": 133 },
    { "epoch": 2.85, "learning_rate": 1.9007092198581563e-05, "loss": 1.8034, "step": 134 },
    { "epoch": 2.87, "learning_rate": 1.914893617021277e-05, "loss": 1.8849, "step": 135 },
    { "epoch": 2.89, "learning_rate": 1.929078014184397e-05, "loss": 1.8021, "step": 136 },
    { "epoch": 2.91, "learning_rate": 1.9432624113475178e-05, "loss": 1.8012, "step": 137 },
    { "epoch": 2.94, "learning_rate": 1.9574468085106384e-05, "loss": 1.5726, "step": 138 },
    { "epoch": 2.96, "learning_rate": 1.971631205673759e-05, "loss": 1.6027, "step": 139 },
    { "epoch": 2.98, "learning_rate": 1.9858156028368796e-05, "loss": 1.6119, "step": 140 },
    { "epoch": 3.0, "learning_rate": 2e-05, "loss": 1.5423, "step": 141 },
    { "epoch": 3.02, "learning_rate": 1.9999969355941432e-05, "loss": 1.5446, "step": 142 },
    { "epoch": 3.04, "learning_rate": 1.9999877423953538e-05, "loss": 1.5482, "step": 143 },
    { "epoch": 3.06, "learning_rate": 1.9999724204599748e-05, "loss": 1.5518, "step": 144 },
    { "epoch": 3.09, "learning_rate": 1.9999509698819117e-05, "loss": 1.4657, "step": 145 },
    { "epoch": 3.11, "learning_rate": 1.9999233907926312e-05, "loss": 1.4904, "step": 146 },
    { "epoch": 3.13, "learning_rate": 1.9998896833611603e-05, "loss": 1.2306, "step": 147 },
    { "epoch": 3.15, "learning_rate": 1.9998498477940854e-05, "loss": 1.307, "step": 148 },
    { "epoch": 3.17, "learning_rate": 1.999803884335551e-05, "loss": 1.2354, "step": 149 },
    { "epoch": 3.19, "learning_rate": 1.9997517932672592e-05, "loss": 1.3099, "step": 150 },
    { "epoch": 3.21, "learning_rate": 1.9996935749084657e-05, "loss": 1.3299, "step": 151 },
    { "epoch": 3.23, "learning_rate": 1.99962922961598e-05, "loss": 1.2965, "step": 152 },
    { "epoch": 3.26, "learning_rate": 1.999558757784162e-05, "loss": 1.2596, "step": 153 },
    { "epoch": 3.28, "learning_rate": 1.999482159844921e-05, "loss": 1.1603, "step": 154 },
    { "epoch": 3.3, "learning_rate": 1.9993994362677108e-05, "loss": 1.1086, "step": 155 },
    { "epoch": 3.32, "learning_rate": 1.999310587559529e-05, "loss": 1.0816, "step": 156 },
    { "epoch": 3.34, "learning_rate": 1.9992156142649125e-05, "loss": 1.1643, "step": 157 },
    { "epoch": 3.36, "learning_rate": 1.999114516965934e-05, "loss": 1.0339, "step": 158 },
    { "epoch": 3.38, "learning_rate": 1.999007296282201e-05, "loss": 1.0581, "step": 159 },
    { "epoch": 3.4, "learning_rate": 1.998893952870848e-05, "loss": 1.0628, "step": 160 },
    { "epoch": 3.43, "learning_rate": 1.9987744874265357e-05, "loss": 0.936, "step": 161 },
    { "epoch": 3.45, "learning_rate": 1.9986489006814454e-05, "loss": 0.9661, "step": 162 },
    { "epoch": 3.47, "learning_rate": 1.998517193405275e-05, "loss": 0.8652, "step": 163 },
    { "epoch": 3.49, "learning_rate": 1.9983793664052324e-05, "loss": 0.9032, "step": 164 },
    { "epoch": 3.51, "learning_rate": 1.9982354205260347e-05, "loss": 0.8113, "step": 165 },
    { "epoch": 3.53, "learning_rate": 1.9980853566498984e-05, "loss": 0.8699, "step": 166 },
    { "epoch": 3.55, "learning_rate": 1.9979291756965368e-05, "loss": 0.7534, "step": 167 },
    { "epoch": 3.57, "learning_rate": 1.9977668786231536e-05, "loss": 0.7812, "step": 168 },
    { "epoch": 3.6, "learning_rate": 1.9975984664244365e-05, "loss": 0.7673, "step": 169 },
    { "epoch": 3.62, "learning_rate": 1.997423940132553e-05, "loss": 0.803, "step": 170 },
    { "epoch": 3.64, "learning_rate": 1.9972433008171417e-05, "loss": 0.7405, "step": 171 },
    { "epoch": 3.66, "learning_rate": 1.997056549585307e-05, "loss": 0.7541, "step": 172 },
    { "epoch": 3.68, "learning_rate": 1.9968636875816112e-05, "loss": 0.7322, "step": 173 },
    { "epoch": 3.7, "learning_rate": 1.9966647159880703e-05, "loss": 0.6955, "step": 174 },
    { "epoch": 3.72, "learning_rate": 1.996459636024143e-05, "loss": 0.6827, "step": 175 },
    { "epoch": 3.74, "learning_rate": 1.9962484489467262e-05, "loss": 0.6397, "step": 176 },
    { "epoch": 3.77, "learning_rate": 1.9960311560501457e-05, "loss": 0.6536, "step": 177 },
    { "epoch": 3.79, "learning_rate": 1.9958077586661485e-05, "loss": 0.6731, "step": 178 },
    { "epoch": 3.81, "learning_rate": 1.9955782581638948e-05, "loss": 0.593, "step": 179 },
    { "epoch": 3.83, "learning_rate": 1.9953426559499508e-05, "loss": 0.6006, "step": 180 },
    { "epoch": 3.85, "learning_rate": 1.9951009534682776e-05, "loss": 0.5463, "step": 181 },
    { "epoch": 3.87, "learning_rate": 1.9948531522002247e-05, "loss": 0.5983, "step": 182 },
    { "epoch": 3.89, "learning_rate": 1.9945992536645188e-05, "loss": 0.5761, "step": 183 },
    { "epoch": 3.91, "learning_rate": 1.9943392594172563e-05, "loss": 0.5364, "step": 184 },
    { "epoch": 3.94, "learning_rate": 1.994073171051893e-05, "loss": 0.5438, "step": 185 },
    { "epoch": 3.96, "learning_rate": 1.993800990199235e-05, "loss": 0.8069, "step": 186 },
    { "epoch": 3.98, "learning_rate": 1.9935227185274267e-05, "loss": 0.5221, "step": 187 },
    { "epoch": 4.0, "learning_rate": 1.9932383577419432e-05, "loss": 0.4576, "step": 188 },
    { "epoch": 4.02, "learning_rate": 1.9929479095855783e-05, "loss": 0.4969, "step": 189 },
    { "epoch": 4.04, "learning_rate": 1.9926513758384334e-05, "loss": 0.4627, "step": 190 },
    { "epoch": 4.06, "learning_rate": 1.992348758317909e-05, "loss": 0.4637, "step": 191 },
    { "epoch": 4.09, "learning_rate": 1.99204005887869e-05, "loss": 0.4963, "step": 192 },
    { "epoch": 4.11, "learning_rate": 1.9917252794127376e-05, "loss": 0.4581, "step": 193 },
    { "epoch": 4.13, "learning_rate": 1.991404421849276e-05, "loss": 0.4575, "step": 194 },
    { "epoch": 4.15, "learning_rate": 1.9910774881547803e-05, "loss": 0.4474, "step": 195 },
    { "epoch": 4.17, "learning_rate": 1.9907444803329658e-05, "loss": 0.3963, "step": 196 },
    { "epoch": 4.19, "learning_rate": 1.9904054004247754e-05, "loss": 0.4076, "step": 197 },
    { "epoch": 4.21, "learning_rate": 1.990060250508365e-05, "loss": 0.3709, "step": 198 },
    { "epoch": 4.23, "learning_rate": 1.989709032699094e-05, "loss": 0.4097, "step": 199 },
    { "epoch": 4.26, "learning_rate": 1.9893517491495098e-05, "loss": 0.4051, "step": 200 },
    { "epoch": 4.28, "learning_rate": 1.9889884020493363e-05, "loss": 0.4074, "step": 201 },
    { "epoch": 4.3, "learning_rate": 1.9886189936254596e-05, "loss": 0.359, "step": 202 },
    { "epoch": 4.32, "learning_rate": 1.9882435261419142e-05, "loss": 0.376, "step": 203 },
    { "epoch": 4.34, "learning_rate": 1.9878620018998696e-05, "loss": 0.3487, "step": 204 },
    { "epoch": 4.36, "learning_rate": 1.9874744232376162e-05, "loss": 0.3658, "step": 205 },
    { "epoch": 4.38, "learning_rate": 1.9870807925305506e-05, "loss": 0.3561, "step": 206 },
    { "epoch": 4.4, "learning_rate": 1.986681112191161e-05, "loss": 0.354, "step": 207 },
    { "epoch": 4.43, "learning_rate": 1.9862753846690133e-05, "loss": 0.3852, "step": 208 },
    { "epoch": 4.45, "learning_rate": 1.9858636124507347e-05, "loss": 0.3234, "step": 209 },
    { "epoch": 4.47, "learning_rate": 1.98544579806e-05, "loss": 0.3471, "step": 210 },
    { "epoch": 4.49, "learning_rate": 1.985021944057515e-05, "loss": 0.3491, "step": 211 },
    { "epoch": 4.51, "learning_rate": 1.9845920530410003e-05, "loss": 0.3425, "step": 212 },
    { "epoch": 4.53, "learning_rate": 1.984156127645178e-05, "loss": 0.3225, "step": 213 },
    { "epoch": 4.55, "learning_rate": 1.9837141705417525e-05, "loss": 0.3315, "step": 214 },
    { "epoch": 4.57, "learning_rate": 1.983266184439395e-05, "loss": 0.3023, "step": 215 },
    { "epoch": 4.6, "learning_rate": 1.9828121720837288e-05, "loss": 0.3035, "step": 216 },
    { "epoch": 4.62, "learning_rate": 1.98235213625731e-05, "loss": 0.2494, "step": 217 },
    { "epoch": 4.64, "learning_rate": 1.9818860797796106e-05, "loss": 0.2933, "step": 218 },
    { "epoch": 4.66, "learning_rate": 1.9814140055070044e-05, "loss": 0.2922, "step": 219 },
    { "epoch": 4.68, "learning_rate": 1.980935916332745e-05, "loss": 0.4312, "step": 220 },
    { "epoch": 4.7, "learning_rate": 1.980451815186951e-05, "loss": 0.2623, "step": 221 },
    { "epoch": 4.72, "learning_rate": 1.979961705036587e-05, "loss": 0.2653, "step": 222 },
    { "epoch": 4.74, "learning_rate": 1.9794655888854463e-05, "loss": 0.2629, "step": 223 },
    { "epoch": 4.77, "learning_rate": 1.9789634697741307e-05, "loss": 0.2505, "step": 224 },
    { "epoch": 4.79, "learning_rate": 1.9784553507800346e-05, "loss": 0.2366, "step": 225 },
    { "epoch": 4.81, "learning_rate": 1.977941235017323e-05, "loss": 0.2257, "step": 226 },
    { "epoch": 4.83, "learning_rate": 1.977421125636915e-05, "loss": 0.2523, "step": 227 },
    { "epoch": 4.85, "learning_rate": 1.9768950258264625e-05, "loss": 0.2295, "step": 228 },
    { "epoch": 4.87, "learning_rate": 1.9763629388103325e-05, "loss": 0.2227, "step": 229 },
    { "epoch": 4.89, "learning_rate": 1.9758248678495862e-05, "loss": 0.2199, "step": 230 },
    { "epoch": 4.91, "learning_rate": 1.975280816241959e-05, "loss": 0.2485, "step": 231 },
    { "epoch": 4.94, "learning_rate": 1.9747307873218414e-05, "loss": 0.2267, "step": 232 },
    { "epoch": 4.96, "learning_rate": 1.9741747844602562e-05, "loss": 0.2168, "step": 233 },
    { "epoch": 4.98, "learning_rate": 1.9736128110648407e-05, "loss": 0.1815, "step": 234 },
    { "epoch": 5.0, "learning_rate": 1.973044870579824e-05, "loss": 0.2438, "step": 235 },
    { "epoch": 5.02, "learning_rate": 1.9724709664860066e-05, "loss": 0.2263, "step": 236 },
    { "epoch": 5.04, "learning_rate": 1.9718911023007382e-05, "loss": 0.1825, "step": 237 },
    { "epoch": 5.06, "learning_rate": 1.971305281577897e-05, "loss": 0.2132, "step": 238 },
    { "epoch": 5.09, "learning_rate": 1.970713507907869e-05, "loss": 0.2156, "step": 239 },
    { "epoch": 5.11, "learning_rate": 1.970115784917523e-05, "loss": 0.1938, "step": 240 },
    { "epoch": 5.13, "learning_rate": 1.96951211627019e-05, "loss": 0.2001, "step": 241 },
    { "epoch": 5.15, "learning_rate": 1.9689025056656424e-05, "loss": 0.2059, "step": 242 },
    { "epoch": 5.17, "learning_rate": 1.9682869568400683e-05, "loss": 0.2041, "step": 243 },
    { "epoch": 5.19, "learning_rate": 1.9676654735660514e-05, "loss": 0.1836, "step": 244 },
    { "epoch": 5.21, "learning_rate": 1.9670380596525443e-05, "loss": 0.1877, "step": 245 },
    { "epoch": 5.23, "learning_rate": 1.9664047189448496e-05, "loss": 0.2254, "step": 246 },
    { "epoch": 5.26, "learning_rate": 1.965765455324593e-05, "loss": 0.1691, "step": 247 },
    { "epoch": 5.28, "learning_rate": 1.9651202727097013e-05, "loss": 0.1707, "step": 248 },
    { "epoch": 5.3, "learning_rate": 1.964469175054377e-05, "loss": 0.1655, "step": 249 },
    { "epoch": 5.32, "learning_rate": 1.9638121663490745e-05, "loss": 0.1862, "step": 250 },
    { "epoch": 5.34, "learning_rate": 1.963149250620477e-05, "loss": 0.1776, "step": 251 },
    { "epoch": 5.36, "learning_rate": 1.9624804319314704e-05, "loss": 0.2166, "step": 252 },
    { "epoch": 5.38, "learning_rate": 1.961805714381118e-05, "loss": 0.1938, "step": 253 },
    { "epoch": 5.4, "learning_rate": 1.961125102104637e-05, "loss": 0.1724, "step": 254 },
    { "epoch": 5.43, "learning_rate": 1.9604385992733718e-05, "loss": 0.1872, "step": 255 },
    { "epoch": 5.45, "learning_rate": 1.9597462100947685e-05, "loss": 0.2104, "step": 256 },
    { "epoch": 5.47, "learning_rate": 1.9590479388123508e-05, "loss": 0.1538, "step": 257 },
    { "epoch": 5.49, "learning_rate": 1.9583437897056915e-05, "loss": 0.1405, "step": 258 },
    { "epoch": 5.51, "learning_rate": 1.957633767090388e-05, "loss": 0.1675, "step": 259 },
    { "epoch": 5.53, "learning_rate": 1.956917875318035e-05, "loss": 0.1965, "step": 260 },
    { "epoch": 5.55, "learning_rate": 1.9561961187761987e-05, "loss": 0.1718, "step": 261 },
    { "epoch": 5.57, "learning_rate": 1.955468501888389e-05, "loss": 0.1561, "step": 262 },
    { "epoch": 5.6, "learning_rate": 1.9547350291140327e-05, "loss": 0.1866, "step": 263 },
    { "epoch": 5.62, "learning_rate": 1.953995704948446e-05, "loss": 0.1577, "step": 264 },
    { "epoch": 5.64, "learning_rate": 1.953250533922808e-05, "loss": 0.1335, "step": 265 },
    { "epoch": 5.66, "learning_rate": 1.9524995206041317e-05, "loss": 0.1633, "step": 266 },
    { "epoch": 5.68, "learning_rate": 1.9517426695952358e-05, "loss": 0.1502, "step": 267 },
    { "epoch": 5.7, "learning_rate": 1.9509799855347177e-05, "loss": 0.1425, "step": 268 },
    { "epoch": 5.72, "learning_rate": 1.950211473096925e-05, "loss": 0.1534, "step": 269 },
    { "epoch": 5.74, "learning_rate": 1.9494371369919253e-05, "loss": 0.1907, "step": 270 },
    { "epoch": 5.77, "learning_rate": 1.9486569819654785e-05, "loss": 0.1403, "step": 271 },
    { "epoch": 5.79, "learning_rate": 1.9478710127990084e-05, "loss": 0.4122, "step": 272 },
    { "epoch": 5.81, "learning_rate": 1.9470792343095718e-05, "loss": 0.1667, "step": 273 },
    { "epoch": 5.83, "learning_rate": 1.94628165134983e-05, "loss": 0.1403, "step": 274 },
    { "epoch": 5.85, "learning_rate": 1.945478268808019e-05, "loss": 0.1678, "step": 275 },
    { "epoch": 5.87, "learning_rate": 1.944669091607919e-05, "loss": 0.15, "step": 276 },
    { "epoch": 5.89, "learning_rate": 1.9438541247088244e-05, "loss": 0.1645, "step": 277 },
    { "epoch": 5.91, "learning_rate": 1.943033373105514e-05, "loss": 0.1499, "step": 278 },
    { "epoch": 5.94, "learning_rate": 1.9422068418282204e-05, "loss": 0.146, "step": 279 },
    { "epoch": 5.96, "learning_rate": 1.9413745359425973e-05, "loss": 0.1388, "step": 280 },
    { "epoch": 5.98, "learning_rate": 1.9405364605496912e-05, "loss": 0.1407, "step": 281 },
    { "epoch": 6.0, "learning_rate": 1.9396926207859085e-05, "loss": 0.1715, "step": 282 },
    { "epoch": 6.02, "learning_rate": 1.938843021822984e-05, "loss": 0.1455, "step": 283 },
    { "epoch": 6.04, "learning_rate": 1.9379876688679502e-05, "loss": 0.1411, "step": 284 },
    { "epoch": 6.06, "learning_rate": 1.9371265671631038e-05, "loss": 0.1386, "step": 285 },
    { "epoch": 6.09, "learning_rate": 1.936259721985975e-05, "loss": 0.1461, "step": 286 },
    { "epoch": 6.11, "learning_rate": 1.935387138649295e-05, "loss": 0.3217, "step": 287 },
    { "epoch": 6.13, "learning_rate": 1.9345088225009626e-05, "loss": 0.1481, "step": 288 },
    { "epoch": 6.15, "learning_rate": 1.9336247789240125e-05, "loss": 0.1527, "step": 289 },
    { "epoch": 6.17, "learning_rate": 1.9327350133365806e-05, "loss": 0.1279, "step": 290 },
    { "epoch": 6.19, "learning_rate": 1.931839531191873e-05, "loss": 0.1263, "step": 291 },
    { "epoch": 6.21, "learning_rate": 1.9309383379781312e-05, "loss": 0.1179, "step": 292 },
    { "epoch": 6.23, "learning_rate": 1.930031439218599e-05, "loss": 0.142, "step": 293 },
    { "epoch": 6.26, "learning_rate": 1.9291188404714876e-05, "loss": 0.1278, "step": 294 },
    { "epoch": 6.28, "learning_rate": 1.9282005473299436e-05, "loss": 0.134, "step": 295 },
    { "epoch": 6.3, "learning_rate": 1.9272765654220117e-05, "loss": 0.1392, "step": 296 },
    { "epoch": 6.32, "learning_rate": 1.926346900410604e-05, "loss": 0.1281, "step": 297 },
    { "epoch": 6.34, "learning_rate": 1.925411557993462e-05, "loss": 0.1387, "step": 298 },
    { "epoch": 6.36, "learning_rate": 1.924470543903123e-05, "loss": 0.1428, "step": 299 },
    { "epoch": 6.38, "learning_rate": 1.9235238639068855e-05, "loss": 0.1205, "step": 300 },
    { "epoch": 6.4, "learning_rate": 1.922571523806773e-05, "loss": 0.1259, "step": 301 },
    { "epoch": 6.43, "learning_rate": 1.9216135294394984e-05, "loss": 0.1259, "step": 302 },
    { "epoch": 6.45, "learning_rate": 1.920649886676429e-05, "loss": 0.1209, "step": 303 },
    { "epoch": 6.47, "learning_rate": 1.9196806014235495e-05, "loss": 0.1064, "step": 304 },
    { "epoch": 6.49, "learning_rate": 1.9187056796214275e-05, "loss": 0.12, "step": 305 },
    { "epoch": 6.51, "learning_rate": 1.9177251272451742e-05, "loss": 0.1362, "step": 306 },
    { "epoch": 6.53, "learning_rate": 1.9167389503044108e-05, "loss": 0.1217, "step": 307 },
    { "epoch": 6.55, "learning_rate": 1.915747154843231e-05, "loss": 0.1233, "step": 308 },
    { "epoch": 6.57, "learning_rate": 1.914749746940161e-05, "loss": 0.1062, "step": 309 },
    { "epoch": 6.6, "learning_rate": 1.913746732708127e-05, "loss": 0.1578, "step": 310 },
    { "epoch": 6.62, "learning_rate": 1.9127381182944135e-05, "loss": 0.1289, "step": 311 },
    { "epoch": 6.64, "learning_rate": 1.9117239098806296e-05, "loss": 0.1194, "step": 312 },
    { "epoch": 6.66, "learning_rate": 1.910704113682667e-05, "loss": 0.1334, "step": 313 },
    { "epoch": 6.68, "learning_rate": 1.9096787359506642e-05, "loss": 0.1311, "step": 314 },
    { "epoch": 6.7, "learning_rate": 1.9086477829689688e-05, "loss": 0.1483, "step": 315 },
    { "epoch": 6.72, "learning_rate": 1.9076112610560974e-05, "loss": 0.115, "step": 316 },
    { "epoch": 6.74, "learning_rate": 1.9065691765646977e-05, "loss": 0.1013, "step": 317 },
    { "epoch": 6.77, "learning_rate": 1.905521535881509e-05, "loss": 0.1254, "step": 318 },
    { "epoch": 6.79, "learning_rate": 1.9044683454273244e-05, "loss": 0.1257, "step": 319 },
    { "epoch": 6.81, "learning_rate": 1.903409611656949e-05, "loss": 0.0993, "step": 320 },
    { "epoch": 6.83, "learning_rate": 1.902345341059164e-05, "loss": 0.155, "step": 321 },
    { "epoch": 6.85, "learning_rate": 1.9012755401566823e-05, "loss": 0.1131, "step": 322 },
    { "epoch": 6.87, "learning_rate": 1.9002002155061125e-05, "loss": 0.1276, "step": 323 },
    { "epoch": 6.89, "learning_rate": 1.8991193736979176e-05, "loss": 0.1242, "step": 324 },
    { "epoch": 6.91, "learning_rate": 1.8980330213563725e-05, "loss": 0.125, "step": 325 },
    { "epoch": 6.94, "learning_rate": 1.896941165139527e-05, "loss": 0.1188, "step": 326 },
    { "epoch": 6.96, "learning_rate": 1.895843811739162e-05, "loss": 0.1359, "step": 327 },
    { "epoch": 6.98, "learning_rate": 1.8947409678807497e-05, "loss": 0.1293, "step": 328 },
    { "epoch": 7.0, "learning_rate": 1.8936326403234125e-05, "loss": 0.1187, "step": 329 },
    { "epoch": 7.02, "learning_rate": 1.8925188358598815e-05, "loss": 0.1457, "step": 330 },
    { "epoch": 7.04, "learning_rate": 1.8913995613164544e-05, "loss": 0.1351, "step": 331 },
    { "epoch": 7.06, "learning_rate": 1.890274823552954e-05, "loss": 0.0991, "step": 332 },
    { "epoch": 7.09, "learning_rate": 1.8891446294626868e-05, "loss": 0.1086, "step": 333 },
    { "epoch": 7.11, "learning_rate": 1.888008985972399e-05, "loss": 0.2039, "step": 334 },
    { "epoch": 7.13, "learning_rate": 1.886867900042236e-05, "loss": 0.0833, "step": 335 },
    { "epoch": 7.15, "learning_rate": 1.8857213786656986e-05, "loss": 0.1153, "step": 336 },
    { "epoch": 7.17, "learning_rate": 1.8845694288696003e-05, "loss": 0.115, "step": 337 },
    { "epoch": 7.19, "learning_rate": 1.8834120577140252e-05, "loss": 0.1029, "step": 338 },
    { "epoch": 7.21, "learning_rate": 1.882249272292282e-05, "loss": 0.0894, "step": 339 },
    { "epoch": 7.23, "learning_rate": 1.8810810797308644e-05, "loss": 0.1076, "step": 340 },
    { "epoch": 7.26, "learning_rate": 1.8799074871894045e-05, "loss": 0.1212, "step": 341 },
    { "epoch": 7.28, "learning_rate": 1.87872850186063e-05, "loss": 0.1089, "step": 342 },
    { "epoch": 7.3, "learning_rate": 1.87754413097032e-05, "loss": 0.0974, "step": 343 },
    { "epoch": 7.32, "learning_rate": 1.8763543817772606e-05, "loss": 0.1184, "step": 344 },
    { "epoch": 7.34, "learning_rate": 1.8751592615732007e-05, "loss": 0.1312, "step": 345 },
    { "epoch": 7.36, "learning_rate": 1.8739587776828072e-05, "loss": 0.1343, "step": 346 },
    { "epoch": 7.38, "learning_rate": 1.8727529374636194e-05, "loss": 0.1408, "step": 347 },
    { "epoch": 7.4, "learning_rate": 1.871541748306005e-05, "loss": 0.1215, "step": 348 },
    { "epoch": 7.43, "learning_rate": 1.8703252176331148e-05, "loss": 0.1256, "step": 349 },
    { "epoch": 7.45, "learning_rate": 1.8691033529008357e-05, "loss": 0.12, "step": 350 },
    { "epoch": 7.47, "learning_rate": 1.867876161597747e-05, "loss": 0.1089, "step": 351 },
    { "epoch": 7.49, "learning_rate": 1.8666436512450725e-05, "loss": 0.114, "step": 352 },
    { "epoch": 7.51, "learning_rate": 1.865405829396637e-05, "loss": 0.1015, "step": 353 },
    { "epoch": 7.53, "learning_rate": 1.8641627036388168e-05, "loss": 0.1488, "step": 354 },
    { "epoch": 7.55, "learning_rate": 1.862914281590496e-05, "loss": 0.1031, "step": 355 },
    { "epoch": 7.57, "learning_rate": 1.861660570903018e-05, "loss": 0.1052, "step": 356 },
    { "epoch": 7.6, "learning_rate": 1.8604015792601395e-05, "loss": 0.1028, "step": 357 },
    { "epoch": 7.62, "learning_rate": 1.8591373143779837e-05, "loss": 0.1188, "step": 358 },
    { "epoch": 7.64, "learning_rate": 1.8578677840049915e-05, "loss": 0.1091, "step": 359 },
    { "epoch": 7.66, "learning_rate": 1.856592995921876e-05, "loss": 0.1076, "step": 360 },
    { "epoch": 7.68, "learning_rate": 1.855312957941573e-05, "loss": 0.1121, "step": 361 },
    { "epoch": 7.7, "learning_rate": 1.854027677909194e-05, "loss": 0.0948, "step": 362 },
    { "epoch": 7.72, "learning_rate": 1.852737163701979e-05, "loss": 0.1131, "step": 363 },
    { "epoch": 7.74, "learning_rate": 1.8514414232292462e-05, "loss": 0.1109, "step": 364 },
    { "epoch": 7.77, "learning_rate": 1.850140464432345e-05, "loss": 0.107, "step": 365 },
    { "epoch": 7.79, "learning_rate": 1.8488342952846074e-05, "loss": 0.1049, "step": 366 },
    { "epoch": 7.81, "learning_rate": 1.8475229237912976e-05, "loss": 0.1282, "step": 367 },
    { "epoch": 7.83, "learning_rate": 1.8462063579895645e-05, "loss": 0.108, "step": 368 },
    { "epoch": 7.85, "learning_rate": 1.844884605948392e-05, "loss": 0.1, "step": 369 },
    { "epoch": 7.87, "learning_rate": 1.8435576757685497e-05, "loss": 0.0923, "step": 370 },
    { "epoch": 7.89, "learning_rate": 1.842225575582543e-05, "loss": 0.0976, "step": 371 },
    { "epoch": 7.91, "learning_rate": 1.8408883135545634e-05, "loss": 0.1149, "step": 372 },
    { "epoch": 7.94, "learning_rate": 1.839545897880437e-05, "loss": 0.1024, "step": 373 },
    { "epoch": 7.96, "learning_rate": 1.838198336787578e-05, "loss": 0.0832, "step": 374 },
    { "epoch": 7.98, "learning_rate": 1.8368456385349333e-05, "loss": 0.1214, "step": 375 },
    { "epoch": 8.0, "learning_rate": 1.8354878114129368e-05, "loss": 0.1162, "step": 376 },
    { "epoch": 8.02, "learning_rate": 1.8341248637434547e-05, "loss": 0.1073, "step": 377 },
    { "epoch": 8.04, "learning_rate": 1.832756803879737e-05, "loss": 0.0918, "step": 378 },
    { "epoch": 8.06, "learning_rate": 1.8313836402063646e-05, "loss": 0.1112, "step": 379 },
    { "epoch": 8.09, "learning_rate": 1.8300053811392e-05, "loss": 0.1123, "step": 380 },
    { "epoch": 8.11, "learning_rate": 1.8286220351253324e-05, "loss": 0.1266, "step": 381 },
    { "epoch": 8.13, "learning_rate": 1.8272336106430296e-05, "loss": 0.1114, "step": 382 },
    { "epoch": 8.15, "learning_rate": 1.825840116201684e-05, "loss": 0.0999, "step": 383 },
    { "epoch": 8.17, "learning_rate": 1.8244415603417603e-05, "loss": 0.1146, "step": 384 },
    { "epoch": 8.19, "learning_rate": 1.8230379516347443e-05, "loss": 0.1118, "step": 385 },
    { "epoch": 8.21, "learning_rate": 1.8216292986830895e-05, "loss": 0.1065, "step": 386 },
    { "epoch": 8.23, "learning_rate": 1.8202156101201646e-05, "loss": 0.0962, "step": 387 },
    { "epoch": 8.26, "learning_rate": 1.8187968946102e-05, "loss": 0.1029, "step": 388 },
    { "epoch": 8.28, "learning_rate": 1.8173731608482367e-05, "loss": 0.104, "step": 389 },
    { "epoch": 8.3, "learning_rate": 1.8159444175600706e-05, "loss": 0.0997, "step": 390 },
    { "epoch": 8.32, "learning_rate": 1.8145106735022e-05, "loss": 0.1168, "step": 391 },
    { "epoch": 8.34, "learning_rate": 1.813071937461773e-05, "loss": 0.1048, "step": 392 },
    { "epoch": 8.36, "learning_rate": 1.8116282182565313e-05, "loss": 0.1032, "step": 393 },
    { "epoch": 8.38, "learning_rate": 1.8101795247347583e-05, "loss": 0.1151, "step": 394 },
    { "epoch": 8.4, "learning_rate": 1.8087258657752235e-05, "loss": 0.112, "step": 395 },
    { "epoch": 8.43, "learning_rate": 1.8072672502871295e-05, "loss": 0.0894, "step": 396 },
    { "epoch": 8.45, "learning_rate": 1.8058036872100556e-05, "loss": 0.1022, "step": 397 },
    { "epoch": 8.47, "learning_rate": 1.804335185513905e-05, "loss": 0.1017, "step": 398 },
    { "epoch": 8.49, "learning_rate": 1.802861754198847e-05, "loss": 0.1063, "step": 399 },
    { "epoch": 8.51, "learning_rate": 1.801383402295265e-05, "loss": 0.0903, "step": 400 },
    { "epoch": 8.53, "learning_rate": 1.7999001388637003e-05, "loss": 0.093, "step": 401 },
    { "epoch": 8.55, "learning_rate": 1.7984119729947944e-05, "loss": 0.0844, "step": 402 },
    { "epoch": 8.57, "learning_rate": 1.796918913809236e-05, "loss": 0.0895, "step": 403 },
    { "epoch": 8.6, "learning_rate": 1.7954209704577038e-05, "loss": 0.1175, "step": 404 },
    { "epoch": 8.62, "learning_rate": 1.79391815212081e-05, "loss": 0.0827, "step": 405 },
    { "epoch": 8.64, "learning_rate": 1.7924104680090456e-05, "loss": 0.0882, "step": 406 },
    { "epoch": 8.66, "learning_rate": 1.790897927362723e-05, "loss": 0.0825, "step": 407 },
    { "epoch": 8.68, "learning_rate": 1.7893805394519187e-05, "loss": 0.1147, "step": 408 },
    { "epoch": 8.7, "learning_rate": 1.7878583135764174e-05, "loss": 0.0908, "step": 409 },
    { "epoch": 8.72, "learning_rate": 1.786331259065655e-05, "loss": 0.1088, "step": 410 },
    { "epoch": 8.74, "learning_rate": 1.7847993852786612e-05, "loss": 0.0865, "step": 411 },
    { "epoch": 8.77, "learning_rate": 1.783262701604002e-05, "loss": 0.1193, "step": 412 },
    { "epoch": 8.79, "learning_rate": 1.7817212174597216e-05, "loss": 0.106, "step": 413 },
    { "epoch": 8.81, "learning_rate": 1.780174942293287e-05, "loss": 0.0879, "step": 414 },
    { "epoch": 8.83, "learning_rate": 1.7786238855815273e-05, "loss": 0.1403, "step": 415 },
    { "epoch": 8.85, "learning_rate": 1.7770680568305766e-05, "loss": 0.0929, "step": 416 },
    { "epoch": 8.87, "learning_rate": 1.7755074655758174e-05, "loss": 0.1055, "step": 417 },
    { "epoch": 8.89, "learning_rate": 1.7739421213818183e-05, "loss": 0.0912, "step": 418 },
    { "epoch": 8.91, "learning_rate": 1.77237203384228e-05, "loss": 0.0988, "step": 419 },
    { "epoch": 8.94, "learning_rate": 1.7707972125799738e-05, "loss": 0.1101, "step": 420 },
    { "epoch": 8.96, "learning_rate": 1.7692176672466813e-05, "loss": 0.0887, "step": 421 },
    { "epoch": 8.98, "learning_rate": 1.7676334075231398e-05, "loss": 0.1239, "step": 422 },
    { "epoch": 9.0, "learning_rate": 1.766044443118978e-05, "loss": 0.0918, "step": 423 },
    { "epoch": 9.02, "learning_rate": 1.7644507837726602e-05, "loss": 0.0709, "step": 424 },
    { "epoch": 9.04, "learning_rate": 1.7628524392514242e-05, "loss": 0.1171, "step": 425 },
    { "epoch": 9.06, "learning_rate": 1.761249419351222e-05, "loss": 0.0952, "step": 426 },
    { "epoch": 9.09, "learning_rate": 1.759641733896662e-05, "loss": 0.0881, "step": 427 },
    { "epoch": 9.11, "learning_rate": 1.7580293927409444e-05, "loss": 0.0869, "step": 428 },
    { "epoch": 9.13, "learning_rate": 1.7564124057658057e-05, "loss": 0.1035, "step": 429 },
    { "epoch": 9.15, "learning_rate": 1.7547907828814533e-05, "loss": 0.0925, "step": 430 },
    { "epoch": 9.17, "learning_rate": 1.7531645340265098e-05, "loss": 0.1127, "step": 431 },
    { "epoch": 9.19, "learning_rate": 1.7515336691679478e-05, "loss": 0.1137, "step": 432 },
    { "epoch": 9.21, "learning_rate": 1.7498981983010305e-05, "loss": 0.0792, "step": 433 },
    { "epoch": 9.23, "learning_rate": 1.748258131449251e-05, "loss": 0.0962, "step": 434 },
    { "epoch": 9.26, "learning_rate": 1.746613478664271e-05, "loss": 0.0919, "step": 435 },
    { "epoch": 9.28, "learning_rate": 1.744964250025857e-05, "loss": 0.0842, "step": 436 },
    { "epoch": 9.3, "learning_rate": 1.743310455641821e-05, "loss": 0.0776, "step": 437 },
    { "epoch": 9.32, "learning_rate": 1.7416521056479577e-05, "loss": 0.1005, "step": 438 },
    { "epoch": 9.34, "learning_rate": 1.7399892102079814e-05, "loss": 0.1006, "step": 439 },
    { "epoch": 9.36, "learning_rate": 1.7383217795134656e-05, "loss": 0.0891, "step": 440 },
    { "epoch": 9.38, "learning_rate": 1.736649823783779e-05, "loss": 0.0836, "step": 441 },
    { "epoch": 9.4, "learning_rate": 1.7349733532660234e-05, "loss": 0.1009, "step": 442 },
    { "epoch": 9.43, "learning_rate": 1.7332923782349707e-05, "loss": 0.0987, "step": 443 },
    { "epoch": 9.45, "learning_rate": 1.7316069089930007e-05, "loss": 0.0895, "step": 444 },
    { "epoch": 9.47, "learning_rate": 1.7299169558700368e-05, "loss": 0.0972, "step": 445 },
    { "epoch": 9.49, "learning_rate": 1.7282225292234838e-05, "loss": 0.0876, "step": 446 },
    { "epoch": 9.51, "learning_rate": 1.7265236394381634e-05, "loss": 0.0898, "step": 447 },
    { "epoch": 9.53, "learning_rate": 1.724820296926251e-05, "loss": 0.0891, "step": 448 },
    { "epoch": 9.55, "learning_rate": 1.7231125121272126e-05, "loss": 0.0915, "step": 449 },
    { "epoch": 9.57, "learning_rate": 1.7214002955077394e-05, "loss": 0.0862, "step": 450 },
    { "epoch": 9.6, "learning_rate": 1.7196836575616848e-05, "loss": 0.0663, "step": 451 },
    { "epoch": 9.62, "learning_rate": 1.717962608809999e-05, "loss": 0.0885, "step": 452 },
    { "epoch": 9.64, "learning_rate": 1.7162371598006668e-05, "loss": 0.0998, "step": 453 },
    { "epoch": 9.66, "learning_rate": 1.7145073211086393e-05, "loss": 0.0915, "step": 454 },
    { "epoch": 9.68, "learning_rate": 1.7127731033357726e-05, "loss": 0.1207, "step": 455 },
    { "epoch": 9.7, "learning_rate": 1.711034517110761e-05, "loss": 0.1175, "step": 456 },
    { "epoch": 9.72, "learning_rate": 1.7092915730890718e-05, "loss": 0.1099, "step": 457 },
    { "epoch": 9.74, "learning_rate": 1.7075442819528812e-05, "loss": 0.1016, "step": 458 },
    { "epoch": 9.77, "learning_rate": 1.705792654411007e-05, "loss": 0.0895, "step": 459 },
    { "epoch": 9.79, "learning_rate": 1.7040367011988454e-05, "loss": 0.0774, "step": 460 },
    { "epoch": 9.81, "learning_rate": 1.7022764330783016e-05, "loss": 0.0951, "step": 461 },
    { "epoch": 9.83, "learning_rate": 1.7005118608377288e-05, "loss": 0.0899, "step": 462 },
    { "epoch": 9.85, "learning_rate": 1.698742995291858e-05, "loss": 0.1076, "step": 463 },
    { "epoch": 9.87, "learning_rate": 1.696969847281732e-05, "loss": 0.095, "step": 464 },
    { "epoch": 9.89, "learning_rate": 1.6951924276746425e-05, "loss": 0.1016, "step": 465 },
    { "epoch": 9.91, "learning_rate": 1.6934107473640584e-05, "loss": 0.0894, "step": 466 },
    { "epoch": 9.94, "learning_rate": 1.6916248172695636e-05, "loss": 0.0852, "step": 467 },
    { "epoch": 9.96, "learning_rate": 1.689834648336787e-05, "loss": 0.0833, "step": 468 },
    { "epoch": 9.98, "learning_rate": 1.688040251537337e-05, "loss": 0.0872, "step": 469 },
    { "epoch": 10.0, "learning_rate": 1.686241637868734e-05, "loss": 0.1001, "step": 470 },
    { "epoch": 10.02, "learning_rate": 1.6844388183543418e-05, "loss": 0.0997, "step": 471 },
    { "epoch": 10.04, "learning_rate": 1.6826318040433024e-05, "loss": 0.0771, "step": 472 },
    { "epoch": 10.06, "learning_rate": 1.6808206060104664e-05, "loss": 0.0948, "step": 473 },
    { "epoch": 10.09, "learning_rate": 1.6790052353563254e-05, "loss": 0.0912, "step": 474 },
    { "epoch": 10.11, "learning_rate": 1.6771857032069437e-05, "loss": 0.0759, "step": 475 },
    { "epoch": 10.13, "learning_rate": 1.675362020713892e-05, "loss": 0.099, "step": 476 },
    { "epoch": 10.15, "learning_rate": 1.6735341990541766e-05, "loss": 0.0902, "step": 477 },
    { "epoch": 10.17, "learning_rate": 1.6717022494301722e-05, "loss": 0.1102, "step": 478 },
    { "epoch": 10.19, "learning_rate": 1.669866183069553e-05, "loss": 0.0764, "step": 479 },
    { "epoch": 10.21, "learning_rate": 1.6680260112252253e-05, "loss": 0.1013, "step": 480 },
    { "epoch": 10.23, "learning_rate": 1.666181745175254e-05, "loss": 0.0714, "step": 481 },
    { "epoch": 10.26, "learning_rate": 1.6643333962227996e-05, "loss": 0.0881, "step": 482 },
    { "epoch": 10.28, "learning_rate": 1.6624809756960445e-05, "loss": 0.0942, "step": 483 },
    { "epoch": 10.3, "learning_rate": 1.660624494948125e-05, "loss": 0.087, "step": 484 },
    { "epoch": 10.32, "learning_rate": 1.658763965357063e-05,
| "loss": 0.1215, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 10.34, | |
| "learning_rate": 1.656899398325693e-05, | |
| "loss": 0.1002, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 10.36, | |
| "learning_rate": 1.655030805281596e-05, | |
| "loss": 0.0853, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 10.38, | |
| "learning_rate": 1.6531581976770264e-05, | |
| "loss": 0.0927, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 10.4, | |
| "learning_rate": 1.651281586988844e-05, | |
| "loss": 0.0794, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 10.43, | |
| "learning_rate": 1.6494009847184423e-05, | |
| "loss": 0.1001, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 10.45, | |
| "learning_rate": 1.6475164023916784e-05, | |
| "loss": 0.0929, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 10.47, | |
| "learning_rate": 1.6456278515588023e-05, | |
| "loss": 0.0982, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 10.49, | |
| "learning_rate": 1.643735343794387e-05, | |
| "loss": 0.1124, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 10.51, | |
| "learning_rate": 1.6418388906972565e-05, | |
| "loss": 0.0874, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 10.53, | |
| "learning_rate": 1.639938503890414e-05, | |
| "loss": 0.0741, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 10.55, | |
| "learning_rate": 1.6380341950209724e-05, | |
| "loss": 0.0812, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 10.57, | |
| "learning_rate": 1.6361259757600827e-05, | |
| "loss": 0.099, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 10.6, | |
| "learning_rate": 1.6342138578028613e-05, | |
| "loss": 0.0979, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 10.62, | |
| "learning_rate": 1.6322978528683192e-05, | |
| "loss": 0.0928, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 10.64, | |
| "learning_rate": 1.6303779726992895e-05, | |
| "loss": 0.0766, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 10.66, | |
| "learning_rate": 1.6284542290623568e-05, | |
| "loss": 0.0816, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 10.68, | |
| "learning_rate": 1.6265266337477833e-05, | |
| "loss": 0.0803, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 10.7, | |
| "learning_rate": 1.6245951985694378e-05, | |
| "loss": 0.0817, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 10.72, | |
| "learning_rate": 1.622659935364723e-05, | |
| "loss": 0.0868, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 10.74, | |
| "learning_rate": 1.6207208559945026e-05, | |
| "loss": 0.0773, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 10.77, | |
| "learning_rate": 1.6187779723430288e-05, | |
| "loss": 0.0622, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 10.79, | |
| "learning_rate": 1.61683129631787e-05, | |
| "loss": 0.0954, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 10.81, | |
| "learning_rate": 1.614880839849837e-05, | |
| "loss": 0.088, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 10.83, | |
| "learning_rate": 1.6129266148929096e-05, | |
| "loss": 0.0917, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 10.85, | |
| "learning_rate": 1.6109686334241655e-05, | |
| "loss": 0.0927, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 10.87, | |
| "learning_rate": 1.609006907443704e-05, | |
| "loss": 0.0828, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 10.89, | |
| "learning_rate": 1.6070414489745742e-05, | |
| "loss": 0.0812, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 10.91, | |
| "learning_rate": 1.605072270062701e-05, | |
| "loss": 0.0811, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 10.94, | |
| "learning_rate": 1.603099382776812e-05, | |
| "loss": 0.0821, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 10.96, | |
| "learning_rate": 1.6011227992083606e-05, | |
| "loss": 0.0708, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 10.98, | |
| "learning_rate": 1.599142531471456e-05, | |
| "loss": 0.0874, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 11.0, | |
| "learning_rate": 1.5971585917027864e-05, | |
| "loss": 0.0787, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 11.02, | |
| "learning_rate": 1.5951709920615448e-05, | |
| "loss": 0.0847, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 11.04, | |
| "learning_rate": 1.5931797447293553e-05, | |
| "loss": 0.0718, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 11.06, | |
| "learning_rate": 1.5911848619101982e-05, | |
| "loss": 0.0785, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 11.09, | |
| "learning_rate": 1.589186355830334e-05, | |
| "loss": 0.0824, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 11.11, | |
| "learning_rate": 1.5871842387382307e-05, | |
| "loss": 0.0739, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 11.13, | |
| "learning_rate": 1.5851785229044867e-05, | |
| "loss": 0.0753, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 11.15, | |
| "learning_rate": 1.583169220621757e-05, | |
| "loss": 0.0804, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 11.17, | |
| "learning_rate": 1.5811563442046768e-05, | |
| "loss": 0.083, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 11.19, | |
| "learning_rate": 1.5791399059897864e-05, | |
| "loss": 0.0862, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 11.21, | |
| "learning_rate": 1.5771199183354565e-05, | |
| "loss": 0.0693, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 11.23, | |
| "learning_rate": 1.5750963936218104e-05, | |
| "loss": 0.0778, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 11.26, | |
| "learning_rate": 1.573069344250651e-05, | |
| "loss": 0.09, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 11.28, | |
| "learning_rate": 1.5710387826453813e-05, | |
| "loss": 0.0922, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 11.3, | |
| "learning_rate": 1.5690047212509317e-05, | |
| "loss": 0.0921, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 11.32, | |
| "learning_rate": 1.566967172533681e-05, | |
| "loss": 0.0726, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 11.34, | |
| "learning_rate": 1.564926148981382e-05, | |
| "loss": 0.0717, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 11.36, | |
| "learning_rate": 1.5628816631030837e-05, | |
| "loss": 0.0918, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 11.38, | |
| "learning_rate": 1.560833727429055e-05, | |
| "loss": 0.1104, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 11.4, | |
| "learning_rate": 1.5587823545107084e-05, | |
| "loss": 0.0707, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 11.43, | |
| "learning_rate": 1.5567275569205216e-05, | |
| "loss": 0.0933, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 11.45, | |
| "learning_rate": 1.554669347251963e-05, | |
| "loss": 0.0733, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 11.47, | |
| "learning_rate": 1.552607738119411e-05, | |
| "loss": 0.0738, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 11.49, | |
| "learning_rate": 1.550542742158081e-05, | |
| "loss": 0.0754, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 11.51, | |
| "learning_rate": 1.5484743720239435e-05, | |
| "loss": 0.1004, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 11.53, | |
| "learning_rate": 1.54640264039365e-05, | |
| "loss": 0.096, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 11.55, | |
| "learning_rate": 1.5443275599644537e-05, | |
| "loss": 0.0799, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 11.57, | |
| "learning_rate": 1.5422491434541318e-05, | |
| "loss": 0.0919, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 11.6, | |
| "learning_rate": 1.540167403600908e-05, | |
| "loss": 0.0836, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 11.62, | |
| "learning_rate": 1.5380823531633727e-05, | |
| "loss": 0.0899, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 11.64, | |
| "learning_rate": 1.5359940049204088e-05, | |
| "loss": 0.0695, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 11.66, | |
| "learning_rate": 1.533902371671109e-05, | |
| "loss": 0.0743, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 11.68, | |
| "learning_rate": 1.5318074662346995e-05, | |
| "loss": 0.1106, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 11.7, | |
| "learning_rate": 1.5297093014504616e-05, | |
| "loss": 0.0816, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 11.72, | |
| "learning_rate": 1.5276078901776518e-05, | |
| "loss": 0.0836, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 11.74, | |
| "learning_rate": 1.5255032452954246e-05, | |
| "loss": 0.0773, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 11.77, | |
| "learning_rate": 1.5233953797027519e-05, | |
| "loss": 0.0644, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 11.79, | |
| "learning_rate": 1.521284306318345e-05, | |
| "loss": 0.0844, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 11.81, | |
| "learning_rate": 1.5191700380805754e-05, | |
| "loss": 0.0946, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 11.83, | |
| "learning_rate": 1.517052587947395e-05, | |
| "loss": 0.0909, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 11.85, | |
| "learning_rate": 1.514931968896257e-05, | |
| "loss": 0.0807, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 11.87, | |
| "learning_rate": 1.5128081939240357e-05, | |
| "loss": 0.0734, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 11.89, | |
| "learning_rate": 1.5106812760469489e-05, | |
| "loss": 0.0848, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 11.91, | |
| "learning_rate": 1.5085512283004752e-05, | |
| "loss": 0.0789, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 11.94, | |
| "learning_rate": 1.5064180637392765e-05, | |
| "loss": 0.0895, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 11.96, | |
| "learning_rate": 1.5042817954371167e-05, | |
| "loss": 0.0875, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 11.98, | |
| "learning_rate": 1.5021424364867816e-05, | |
| "loss": 0.0687, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 12.0, | |
| "learning_rate": 1.5000000000000002e-05, | |
| "loss": 0.0897, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 12.02, | |
| "learning_rate": 1.4978544991073618e-05, | |
| "loss": 0.074, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 12.04, | |
| "learning_rate": 1.4957059469582373e-05, | |
| "loss": 0.083, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 12.06, | |
| "learning_rate": 1.4935543567206984e-05, | |
| "loss": 0.0771, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 12.09, | |
| "learning_rate": 1.4913997415814367e-05, | |
| "loss": 0.0796, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 12.11, | |
| "learning_rate": 1.4892421147456827e-05, | |
| "loss": 0.0681, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 12.13, | |
| "learning_rate": 1.4870814894371245e-05, | |
| "loss": 0.105, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 12.15, | |
| "learning_rate": 1.484917878897829e-05, | |
| "loss": 0.0957, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 12.17, | |
| "learning_rate": 1.4827512963881565e-05, | |
| "loss": 0.066, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 12.19, | |
| "learning_rate": 1.4805817551866839e-05, | |
| "loss": 0.0948, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 12.21, | |
| "learning_rate": 1.4784092685901208e-05, | |
| "loss": 0.1076, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 12.23, | |
| "learning_rate": 1.476233849913228e-05, | |
| "loss": 0.0816, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 12.26, | |
| "learning_rate": 1.4740555124887377e-05, | |
| "loss": 0.0663, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 12.28, | |
| "learning_rate": 1.4718742696672692e-05, | |
| "loss": 0.0773, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 12.3, | |
| "learning_rate": 1.4696901348172494e-05, | |
| "loss": 0.0717, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 12.32, | |
| "learning_rate": 1.4675031213248296e-05, | |
| "loss": 0.0892, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 12.34, | |
| "learning_rate": 1.4653132425938032e-05, | |
| "loss": 0.0888, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 12.36, | |
| "learning_rate": 1.4631205120455255e-05, | |
| "loss": 0.0905, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 12.38, | |
| "learning_rate": 1.460924943118828e-05, | |
| "loss": 0.0845, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 12.4, | |
| "learning_rate": 1.4587265492699403e-05, | |
| "loss": 0.0685, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 12.43, | |
| "learning_rate": 1.4565253439724042e-05, | |
| "loss": 0.0624, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 12.45, | |
| "learning_rate": 1.4543213407169919e-05, | |
| "loss": 0.0854, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 12.47, | |
| "learning_rate": 1.4521145530116248e-05, | |
| "loss": 0.0861, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 12.49, | |
| "learning_rate": 1.449904994381289e-05, | |
| "loss": 0.0757, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 12.51, | |
| "learning_rate": 1.447692678367954e-05, | |
| "loss": 0.0837, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 12.53, | |
| "learning_rate": 1.4454776185304871e-05, | |
| "loss": 0.0756, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 12.55, | |
| "learning_rate": 1.4432598284445738e-05, | |
| "loss": 0.0877, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 12.57, | |
| "learning_rate": 1.4410393217026317e-05, | |
| "loss": 0.0684, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 12.6, | |
| "learning_rate": 1.4388161119137284e-05, | |
| "loss": 0.0789, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 12.62, | |
| "learning_rate": 1.436590212703498e-05, | |
| "loss": 0.083, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 12.64, | |
| "learning_rate": 1.4343616377140581e-05, | |
| "loss": 0.0597, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 12.66, | |
| "learning_rate": 1.4321304006039247e-05, | |
| "loss": 0.0776, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 12.68, | |
| "learning_rate": 1.4298965150479305e-05, | |
| "loss": 0.0922, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 12.7, | |
| "learning_rate": 1.4276599947371388e-05, | |
| "loss": 0.0831, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 12.72, | |
| "learning_rate": 1.4254208533787619e-05, | |
| "loss": 0.0766, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 12.74, | |
| "learning_rate": 1.4231791046960752e-05, | |
| "loss": 0.0739, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 12.77, | |
| "learning_rate": 1.4209347624283352e-05, | |
| "loss": 0.0928, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 12.79, | |
| "learning_rate": 1.418687840330692e-05, | |
| "loss": 0.0938, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 12.81, | |
| "learning_rate": 1.4164383521741085e-05, | |
| "loss": 0.0953, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 12.83, | |
| "learning_rate": 1.4141863117452746e-05, | |
| "loss": 0.0636, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 12.85, | |
| "learning_rate": 1.411931732846521e-05, | |
| "loss": 0.0831, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 12.87, | |
| "learning_rate": 1.4096746292957383e-05, | |
| "loss": 0.0755, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 12.89, | |
| "learning_rate": 1.4074150149262882e-05, | |
| "loss": 0.1028, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 12.91, | |
| "learning_rate": 1.4051529035869225e-05, | |
| "loss": 0.0551, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 12.94, | |
| "learning_rate": 1.4028883091416953e-05, | |
| "loss": 0.0906, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 12.96, | |
| "learning_rate": 1.4006212454698798e-05, | |
| "loss": 0.0941, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 12.98, | |
| "learning_rate": 1.3983517264658821e-05, | |
| "loss": 0.0943, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 13.0, | |
| "learning_rate": 1.396079766039157e-05, | |
| "loss": 0.0796, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 13.02, | |
| "learning_rate": 1.3938053781141224e-05, | |
| "loss": 0.0782, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 13.04, | |
| "learning_rate": 1.391528576630073e-05, | |
| "loss": 0.0626, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 13.06, | |
| "learning_rate": 1.3892493755410971e-05, | |
| "loss": 0.0676, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 13.09, | |
| "learning_rate": 1.3869677888159887e-05, | |
| "loss": 0.0623, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 13.11, | |
| "learning_rate": 1.3846838304381635e-05, | |
| "loss": 0.0678, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 13.13, | |
| "learning_rate": 1.3823975144055722e-05, | |
| "loss": 0.0876, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 13.15, | |
| "learning_rate": 1.3801088547306149e-05, | |
| "loss": 0.0635, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 13.17, | |
| "learning_rate": 1.3778178654400565e-05, | |
| "loss": 0.0924, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 13.19, | |
| "learning_rate": 1.3755245605749386e-05, | |
| "loss": 0.0725, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 13.21, | |
| "learning_rate": 1.3732289541904948e-05, | |
| "loss": 0.0729, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 13.23, | |
| "learning_rate": 1.3709310603560648e-05, | |
| "loss": 0.0765, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 13.26, | |
| "learning_rate": 1.3686308931550073e-05, | |
| "loss": 0.086, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 13.28, | |
| "learning_rate": 1.3663284666846135e-05, | |
| "loss": 0.0782, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 13.3, | |
| "learning_rate": 1.3640237950560218e-05, | |
| "loss": 0.0686, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 13.32, | |
| "learning_rate": 1.3617168923941311e-05, | |
| "loss": 0.0803, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 13.34, | |
| "learning_rate": 1.3594077728375129e-05, | |
| "loss": 0.0853, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 13.36, | |
| "learning_rate": 1.3570964505383268e-05, | |
| "loss": 0.0675, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 13.38, | |
| "learning_rate": 1.3547829396622314e-05, | |
| "loss": 0.0738, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 13.4, | |
| "learning_rate": 1.3524672543882997e-05, | |
| "loss": 0.0943, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 13.43, | |
| "learning_rate": 1.3501494089089307e-05, | |
| "loss": 0.0792, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 13.45, | |
| "learning_rate": 1.3478294174297623e-05, | |
| "loss": 0.063, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 13.47, | |
| "learning_rate": 1.3455072941695863e-05, | |
| "loss": 0.1035, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 13.49, | |
| "learning_rate": 1.3431830533602584e-05, | |
| "loss": 0.0605, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 13.51, | |
| "learning_rate": 1.3408567092466132e-05, | |
| "loss": 0.0878, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 13.53, | |
| "learning_rate": 1.3385282760863758e-05, | |
| "loss": 0.0853, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 13.55, | |
| "learning_rate": 1.336197768150074e-05, | |
| "loss": 0.0774, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 13.57, | |
| "learning_rate": 1.3338651997209532e-05, | |
| "loss": 0.0801, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 13.6, | |
| "learning_rate": 1.3315305850948848e-05, | |
| "loss": 0.0627, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 13.62, | |
| "learning_rate": 1.3291939385802832e-05, | |
| "loss": 0.0769, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 13.64, | |
| "learning_rate": 1.3268552744980148e-05, | |
| "loss": 0.0977, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 13.66, | |
| "learning_rate": 1.3245146071813114e-05, | |
| "loss": 0.0795, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 13.68, | |
| "learning_rate": 1.3221719509756821e-05, | |
| "loss": 0.0829, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 13.7, | |
| "learning_rate": 1.3198273202388257e-05, | |
| "loss": 0.0664, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 13.72, | |
| "learning_rate": 1.3174807293405427e-05, | |
| "loss": 0.0772, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 13.74, | |
| "learning_rate": 1.3151321926626466e-05, | |
| "loss": 0.0837, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 13.77, | |
| "learning_rate": 1.312781724598877e-05, | |
| "loss": 0.0834, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 13.79, | |
| "learning_rate": 1.3104293395548098e-05, | |
| "loss": 0.0964, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 13.81, | |
| "learning_rate": 1.3080750519477699e-05, | |
| "loss": 0.0699, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 13.83, | |
| "learning_rate": 1.3057188762067428e-05, | |
| "loss": 0.0696, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 13.85, | |
| "learning_rate": 1.303360826772286e-05, | |
| "loss": 0.1058, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 13.87, | |
| "learning_rate": 1.3010009180964407e-05, | |
| "loss": 0.0684, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 13.89, | |
| "learning_rate": 1.2986391646426425e-05, | |
| "loss": 0.0783, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 13.91, | |
| "learning_rate": 1.2962755808856341e-05, | |
| "loss": 0.0726, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 13.94, | |
| "learning_rate": 1.2939101813113747e-05, | |
| "loss": 0.0858, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 13.96, | |
| "learning_rate": 1.2915429804169537e-05, | |
| "loss": 0.065, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 13.98, | |
| "learning_rate": 1.2891739927104992e-05, | |
| "loss": 0.0864, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 14.0, | |
| "learning_rate": 1.2868032327110904e-05, | |
| "loss": 0.0599, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 14.02, | |
| "learning_rate": 1.2844307149486696e-05, | |
| "loss": 0.0654, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 14.04, | |
| "learning_rate": 1.2820564539639512e-05, | |
| "loss": 0.0794, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 14.06, | |
| "learning_rate": 1.279680464308334e-05, | |
| "loss": 0.0609, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 14.09, | |
| "learning_rate": 1.2773027605438107e-05, | |
| "loss": 0.0809, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 14.11, | |
| "learning_rate": 1.2749233572428805e-05, | |
| "loss": 0.0683, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 14.13, | |
| "learning_rate": 1.2725422689884578e-05, | |
| "loss": 0.0692, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 14.15, | |
| "learning_rate": 1.2701595103737846e-05, | |
| "loss": 0.0771, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 14.17, | |
| "learning_rate": 1.2677750960023396e-05, | |
| "loss": 0.0623, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 14.19, | |
| "learning_rate": 1.2653890404877495e-05, | |
| "loss": 0.0783, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 14.21, | |
| "learning_rate": 1.2630013584536993e-05, | |
| "loss": 0.0968, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 14.23, | |
| "learning_rate": 1.2606120645338428e-05, | |
| "loss": 0.0681, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 14.26, | |
| "learning_rate": 1.2582211733717123e-05, | |
| "loss": 0.0712, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 14.28, | |
| "learning_rate": 1.2558286996206298e-05, | |
| "loss": 0.0727, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 14.3, | |
| "learning_rate": 1.2534346579436158e-05, | |
| "loss": 0.0648, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 14.32, | |
| "learning_rate": 1.251039063013302e-05, | |
| "loss": 0.0697, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 14.34, | |
| "learning_rate": 1.248641929511838e-05, | |
| "loss": 0.0954, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 14.36, | |
| "learning_rate": 1.246243272130804e-05, | |
| "loss": 0.0893, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 14.38, | |
| "learning_rate": 1.2438431055711192e-05, | |
| "loss": 0.082, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 14.4, | |
| "learning_rate": 1.2414414445429525e-05, | |
| "loss": 0.0636, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 14.43, | |
| "learning_rate": 1.2390383037656326e-05, | |
| "loss": 0.083, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 14.45, | |
| "learning_rate": 1.2366336979675561e-05, | |
| "loss": 0.0728, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 14.47, | |
| "learning_rate": 1.2342276418861e-05, | |
| "loss": 0.0854, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 14.49, | |
| "learning_rate": 1.2318201502675285e-05, | |
| "loss": 0.0707, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 14.51, | |
| "learning_rate": 1.2294112378669043e-05, | |
| "loss": 0.0892, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 14.53, | |
| "learning_rate": 1.2270009194479985e-05, | |
| "loss": 0.0752, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 14.55, | |
| "learning_rate": 1.2245892097831982e-05, | |
| "loss": 0.0838, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 14.57, | |
| "learning_rate": 1.2221761236534185e-05, | |
| "loss": 0.0691, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 14.6, | |
| "learning_rate": 1.2197616758480093e-05, | |
| "loss": 0.0864, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 14.62, | |
| "learning_rate": 1.217345881164667e-05, | |
| "loss": 0.0734, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 14.64, | |
| "learning_rate": 1.2149287544093425e-05, | |
| "loss": 0.0682, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 14.66, | |
| "learning_rate": 1.21251031039615e-05, | |
| "loss": 0.0628, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 14.68, | |
| "learning_rate": 1.210090563947278e-05, | |
| "loss": 0.0808, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 14.7, | |
| "learning_rate": 1.2076695298928966e-05, | |
| "loss": 0.0786, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 14.72, | |
| "learning_rate": 1.2052472230710679e-05, | |
| "loss": 0.0758, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 14.74, | |
| "learning_rate": 1.2028236583276543e-05, | |
| "loss": 0.0699, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 14.77, | |
| "learning_rate": 1.2003988505162277e-05, | |
| "loss": 0.0602, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 14.79, | |
| "learning_rate": 1.1979728144979784e-05, | |
| "loss": 0.0765, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 14.81, | |
| "learning_rate": 1.1955455651416246e-05, | |
| "loss": 0.0753, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 14.83, | |
| "learning_rate": 1.1931171173233204e-05, | |
| "loss": 0.0577, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 14.85, | |
| "learning_rate": 1.1906874859265658e-05, | |
| "loss": 0.0483, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 14.87, | |
| "learning_rate": 1.1882566858421137e-05, | |
| "loss": 0.0896, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 14.89, | |
| "learning_rate": 1.1858247319678799e-05, | |
| "loss": 0.1037, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 14.91, | |
| "learning_rate": 1.1833916392088523e-05, | |
| "loss": 0.0626, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 14.94, | |
| "learning_rate": 1.1809574224769983e-05, | |
| "loss": 0.0755, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 14.96, | |
| "learning_rate": 1.1785220966911735e-05, | |
| "loss": 0.0795, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 14.98, | |
| "learning_rate": 1.1760856767770314e-05, | |
| "loss": 0.0764, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 15.0, | |
| "learning_rate": 1.1736481776669307e-05, | |
| "loss": 0.0706, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 15.02, | |
| "learning_rate": 1.1712096142998447e-05, | |
| "loss": 0.0695, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 15.04, | |
| "learning_rate": 1.1687700016212688e-05, | |
| "loss": 0.0699, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 15.06, | |
| "learning_rate": 1.1663293545831302e-05, | |
| "loss": 0.0747, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 15.09, | |
| "learning_rate": 1.1638876881436951e-05, | |
| "loss": 0.0659, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 15.11, | |
| "learning_rate": 1.1614450172674766e-05, | |
| "loss": 0.1061, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 15.13, | |
| "learning_rate": 1.1590013569251457e-05, | |
| "loss": 0.0996, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 15.15, | |
| "learning_rate": 1.1565567220934354e-05, | |
| "loss": 0.0623, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 15.17, | |
| "learning_rate": 1.1541111277550534e-05, | |
| "loss": 0.0765, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 15.19, | |
| "learning_rate": 1.151664588898586e-05, | |
| "loss": 0.0729, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 15.21, | |
| "learning_rate": 1.14921712051841e-05, | |
| "loss": 0.0574, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 15.23, | |
| "learning_rate": 1.1467687376145975e-05, | |
| "loss": 0.0523, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 15.26, | |
| "learning_rate": 1.1443194551928267e-05, | |
| "loss": 0.0764, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 15.28, | |
| "learning_rate": 1.1418692882642886e-05, | |
| "loss": 0.1102, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 15.3, | |
| "learning_rate": 1.1394182518455946e-05, | |
| "loss": 0.0728, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 15.32, | |
| "learning_rate": 1.1369663609586853e-05, | |
| "loss": 0.0677, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 15.34, | |
| "learning_rate": 1.1345136306307391e-05, | |
| "loss": 0.0744, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 15.36, | |
| "learning_rate": 1.1320600758940773e-05, | |
| "loss": 0.0618, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 15.38, | |
| "learning_rate": 1.129605711786076e-05, | |
| "loss": 0.0856, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 15.4, | |
| "learning_rate": 1.1271505533490694e-05, | |
| "loss": 0.0893, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 15.43, | |
| "learning_rate": 1.1246946156302625e-05, | |
| "loss": 0.0668, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 15.45, | |
| "learning_rate": 1.1222379136816347e-05, | |
| "loss": 0.0665, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 15.47, | |
| "learning_rate": 1.1197804625598495e-05, | |
| "loss": 0.0819, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 15.49, | |
| "learning_rate": 1.1173222773261623e-05, | |
| "loss": 0.0559, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 15.51, | |
| "learning_rate": 1.1148633730463274e-05, | |
| "loss": 0.0993, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 15.53, | |
| "learning_rate": 1.1124037647905064e-05, | |
| "loss": 0.061, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 15.55, | |
| "learning_rate": 1.1099434676331751e-05, | |
| "loss": 0.0801, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 15.57, | |
| "learning_rate": 1.1074824966530312e-05, | |
| "loss": 0.0642, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 15.6, | |
| "learning_rate": 1.1050208669329035e-05, | |
| "loss": 0.0881, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 15.62, | |
| "learning_rate": 1.1025585935596556e-05, | |
| "loss": 0.0677, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 15.64, | |
| "learning_rate": 1.1000956916240985e-05, | |
| "loss": 0.0773, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 15.66, | |
| "learning_rate": 1.097632176220894e-05, | |
| "loss": 0.0836, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 15.68, | |
| "learning_rate": 1.0951680624484641e-05, | |
| "loss": 0.0623, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 15.7, | |
| "learning_rate": 1.0927033654088983e-05, | |
| "loss": 0.0673, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 15.72, | |
| "learning_rate": 1.0902381002078611e-05, | |
| "loss": 0.0617, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 15.74, | |
| "learning_rate": 1.0877722819544978e-05, | |
| "loss": 0.058, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 15.77, | |
| "learning_rate": 1.0853059257613447e-05, | |
| "loss": 0.0694, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 15.79, | |
| "learning_rate": 1.082839046744235e-05, | |
| "loss": 0.0957, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 15.81, | |
| "learning_rate": 1.0803716600222048e-05, | |
| "loss": 0.061, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 15.83, | |
| "learning_rate": 1.0779037807174032e-05, | |
| "loss": 0.0805, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 15.85, | |
| "learning_rate": 1.075435423954998e-05, | |
| "loss": 0.0751, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 15.87, | |
| "learning_rate": 1.0729666048630828e-05, | |
| "loss": 0.0729, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 15.89, | |
| "learning_rate": 1.0704973385725853e-05, | |
| "loss": 0.0744, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 15.91, | |
| "learning_rate": 1.0680276402171727e-05, | |
| "loss": 0.0823, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 15.94, | |
| "learning_rate": 1.0655575249331622e-05, | |
| "loss": 0.0854, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 15.96, | |
| "learning_rate": 1.0630870078594249e-05, | |
| "loss": 0.0737, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 15.98, | |
| "learning_rate": 1.0606161041372948e-05, | |
| "loss": 0.069, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 16.0, | |
| "learning_rate": 1.0581448289104759e-05, | |
| "loss": 0.0751, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 16.02, | |
| "learning_rate": 1.0556731973249486e-05, | |
| "loss": 0.0771, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 16.04, | |
| "learning_rate": 1.053201224528877e-05, | |
| "loss": 0.0619, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 16.06, | |
| "learning_rate": 1.0507289256725173e-05, | |
| "loss": 0.0845, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 16.09, | |
| "learning_rate": 1.0482563159081238e-05, | |
| "loss": 0.0712, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 16.11, | |
| "learning_rate": 1.0457834103898559e-05, | |
| "loss": 0.0668, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 16.13, | |
| "learning_rate": 1.0433102242736863e-05, | |
| "loss": 0.0585, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 16.15, | |
| "learning_rate": 1.0408367727173067e-05, | |
| "loss": 0.0765, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 16.17, | |
| "learning_rate": 1.0383630708800366e-05, | |
| "loss": 0.1022, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 16.19, | |
| "learning_rate": 1.035889133922728e-05, | |
| "loss": 0.046, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 16.21, | |
| "learning_rate": 1.0334149770076747e-05, | |
| "loss": 0.0934, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 16.23, | |
| "learning_rate": 1.0309406152985194e-05, | |
| "loss": 0.062, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 16.26, | |
| "learning_rate": 1.028466063960158e-05, | |
| "loss": 0.0584, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 16.28, | |
| "learning_rate": 1.025991338158651e-05, | |
| "loss": 0.0739, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 16.3, | |
| "learning_rate": 1.0235164530611259e-05, | |
| "loss": 0.0681, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 16.32, | |
| "learning_rate": 1.0210414238356879e-05, | |
| "loss": 0.0708, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 16.34, | |
| "learning_rate": 1.0185662656513251e-05, | |
| "loss": 0.1001, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 16.36, | |
| "learning_rate": 1.0160909936778156e-05, | |
| "loss": 0.0656, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 16.38, | |
| "learning_rate": 1.0136156230856357e-05, | |
| "loss": 0.0801, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 16.4, | |
| "learning_rate": 1.0111401690458655e-05, | |
| "loss": 0.0594, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 16.43, | |
| "learning_rate": 1.008664646730097e-05, | |
| "loss": 0.0758, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 16.45, | |
| "learning_rate": 1.0061890713103401e-05, | |
| "loss": 0.0737, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 16.47, | |
| "learning_rate": 1.0037134579589303e-05, | |
| "loss": 0.0634, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 16.49, | |
| "learning_rate": 1.001237821848436e-05, | |
| "loss": 0.065, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 16.51, | |
| "learning_rate": 9.987621781515647e-06, | |
| "loss": 0.0748, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 16.53, | |
| "learning_rate": 9.962865420410702e-06, | |
| "loss": 0.067, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 16.55, | |
| "learning_rate": 9.938109286896604e-06, | |
| "loss": 0.0669, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 16.57, | |
| "learning_rate": 9.913353532699035e-06, | |
| "loss": 0.0767, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 16.6, | |
| "learning_rate": 9.888598309541347e-06, | |
| "loss": 0.0547, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 16.62, | |
| "learning_rate": 9.863843769143646e-06, | |
| "loss": 0.0664, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 16.64, | |
| "learning_rate": 9.83909006322185e-06, | |
| "loss": 0.063, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 16.66, | |
| "learning_rate": 9.814337343486754e-06, | |
| "loss": 0.073, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 16.68, | |
| "learning_rate": 9.789585761643126e-06, | |
| "loss": 0.0587, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 16.7, | |
| "learning_rate": 9.764835469388745e-06, | |
| "loss": 0.064, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 16.72, | |
| "learning_rate": 9.740086618413495e-06, | |
| "loss": 0.0887, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 16.74, | |
| "learning_rate": 9.715339360398421e-06, | |
| "loss": 0.0711, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 16.77, | |
| "learning_rate": 9.690593847014811e-06, | |
| "loss": 0.0569, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 16.79, | |
| "learning_rate": 9.665850229923258e-06, | |
| "loss": 0.0744, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 16.81, | |
| "learning_rate": 9.641108660772727e-06, | |
| "loss": 0.0688, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 16.83, | |
| "learning_rate": 9.616369291199641e-06, | |
| "loss": 0.0609, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 16.85, | |
| "learning_rate": 9.591632272826935e-06, | |
| "loss": 0.07, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 16.87, | |
| "learning_rate": 9.56689775726314e-06, | |
| "loss": 0.0668, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 16.89, | |
| "learning_rate": 9.542165896101445e-06, | |
| "loss": 0.0987, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 16.91, | |
| "learning_rate": 9.517436840918767e-06, | |
| "loss": 0.0572, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 16.94, | |
| "learning_rate": 9.492710743274832e-06, | |
| "loss": 0.0715, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 16.96, | |
| "learning_rate": 9.467987754711236e-06, | |
| "loss": 0.0833, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 16.98, | |
| "learning_rate": 9.443268026750521e-06, | |
| "loss": 0.0751, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 17.0, | |
| "learning_rate": 9.418551710895243e-06, | |
| "loss": 0.1096, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 17.02, | |
| "learning_rate": 9.393838958627053e-06, | |
| "loss": 0.055, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 17.04, | |
| "learning_rate": 9.369129921405754e-06, | |
| "loss": 0.0736, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 17.06, | |
| "learning_rate": 9.34442475066838e-06, | |
| "loss": 0.0643, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 17.09, | |
| "learning_rate": 9.319723597828276e-06, | |
| "loss": 0.0781, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 17.11, | |
| "learning_rate": 9.295026614274153e-06, | |
| "loss": 0.0619, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 17.13, | |
| "learning_rate": 9.270333951369173e-06, | |
| "loss": 0.06, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 17.15, | |
| "learning_rate": 9.245645760450024e-06, | |
| "loss": 0.0718, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 17.17, | |
| "learning_rate": 9.22096219282597e-06, | |
| "loss": 0.0691, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 17.19, | |
| "learning_rate": 9.196283399777956e-06, | |
| "loss": 0.0847, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 17.21, | |
| "learning_rate": 9.171609532557654e-06, | |
| "loss": 0.0965, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 17.23, | |
| "learning_rate": 9.146940742386555e-06, | |
| "loss": 0.0843, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 17.26, | |
| "learning_rate": 9.122277180455024e-06, | |
| "loss": 0.0671, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 17.28, | |
| "learning_rate": 9.097618997921394e-06, | |
| "loss": 0.063, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 17.3, | |
| "learning_rate": 9.07296634591102e-06, | |
| "loss": 0.0506, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 17.32, | |
| "learning_rate": 9.04831937551536e-06, | |
| "loss": 0.0849, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 17.34, | |
| "learning_rate": 9.023678237791064e-06, | |
| "loss": 0.0615, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 17.36, | |
| "learning_rate": 8.999043083759016e-06, | |
| "loss": 0.053, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 17.38, | |
| "learning_rate": 8.974414064403447e-06, | |
| "loss": 0.0625, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 17.4, | |
| "learning_rate": 8.949791330670969e-06, | |
| "loss": 0.0683, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 17.43, | |
| "learning_rate": 8.92517503346969e-06, | |
| "loss": 0.0618, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 17.45, | |
| "learning_rate": 8.900565323668254e-06, | |
| "loss": 0.0597, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 17.47, | |
| "learning_rate": 8.875962352094937e-06, | |
| "loss": 0.0708, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 17.49, | |
| "learning_rate": 8.85136626953673e-06, | |
| "loss": 0.0685, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 17.51, | |
| "learning_rate": 8.82677722673838e-06, | |
| "loss": 0.0685, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 17.53, | |
| "learning_rate": 8.802195374401509e-06, | |
| "loss": 0.0554, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 17.55, | |
| "learning_rate": 8.777620863183658e-06, | |
| "loss": 0.0642, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 17.57, | |
| "learning_rate": 8.753053843697376e-06, | |
| "loss": 0.0755, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 17.6, | |
| "learning_rate": 8.728494466509308e-06, | |
| "loss": 0.0567, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 17.62, | |
| "learning_rate": 8.703942882139244e-06, | |
| "loss": 0.0678, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 17.64, | |
| "learning_rate": 8.679399241059228e-06, | |
| "loss": 0.0727, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 17.66, | |
| "learning_rate": 8.654863693692612e-06, | |
| "loss": 0.0657, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 17.68, | |
| "learning_rate": 8.630336390413148e-06, | |
| "loss": 0.0743, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 17.7, | |
| "learning_rate": 8.60581748154406e-06, | |
| "loss": 0.0748, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 17.72, | |
| "learning_rate": 8.581307117357117e-06, | |
| "loss": 0.1156, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 17.74, | |
| "learning_rate": 8.556805448071736e-06, | |
| "loss": 0.0869, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 17.77, | |
| "learning_rate": 8.532312623854027e-06, | |
| "loss": 0.0881, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 17.79, | |
| "learning_rate": 8.507828794815904e-06, | |
| "loss": 0.0884, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 17.81, | |
| "learning_rate": 8.483354111014142e-06, | |
| "loss": 0.0941, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 17.83, | |
| "learning_rate": 8.458888722449468e-06, | |
| "loss": 0.061, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 17.85, | |
| "learning_rate": 8.434432779065647e-06, | |
| "loss": 0.0621, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 17.87, | |
| "learning_rate": 8.409986430748545e-06, | |
| "loss": 0.0473, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 17.89, | |
| "learning_rate": 8.385549827325235e-06, | |
| "loss": 0.0869, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 17.91, | |
| "learning_rate": 8.361123118563052e-06, | |
| "loss": 0.0597, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 17.94, | |
| "learning_rate": 8.336706454168701e-06, | |
| "loss": 0.079, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 17.96, | |
| "learning_rate": 8.312299983787315e-06, | |
| "loss": 0.062, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 17.98, | |
| "learning_rate": 8.287903857001557e-06, | |
| "loss": 0.0641, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 18.0, | |
| "learning_rate": 8.263518223330698e-06, | |
| "loss": 0.0752, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 18.02, | |
| "learning_rate": 8.23914323222969e-06, | |
| "loss": 0.0646, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 18.04, | |
| "learning_rate": 8.214779033088269e-06, | |
| "loss": 0.0652, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 18.06, | |
| "learning_rate": 8.19042577523002e-06, | |
| "loss": 0.0739, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 18.09, | |
| "learning_rate": 8.166083607911479e-06, | |
| "loss": 0.0585, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 18.11, | |
| "learning_rate": 8.141752680321203e-06, | |
| "loss": 0.0719, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 18.13, | |
| "learning_rate": 8.117433141578865e-06, | |
| "loss": 0.0598, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 18.15, | |
| "learning_rate": 8.093125140734343e-06, | |
| "loss": 0.062, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 18.17, | |
| "learning_rate": 8.068828826766794e-06, | |
| "loss": 0.062, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 18.19, | |
| "learning_rate": 8.044544348583756e-06, | |
| "loss": 0.0807, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 18.21, | |
| "learning_rate": 8.020271855020218e-06, | |
| "loss": 0.0681, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 18.23, | |
| "learning_rate": 7.996011494837725e-06, | |
| "loss": 0.0636, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 18.26, | |
| "learning_rate": 7.971763416723458e-06, | |
| "loss": 0.0492, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 18.28, | |
| "learning_rate": 7.947527769289321e-06, | |
| "loss": 0.0703, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 18.3, | |
| "learning_rate": 7.923304701071034e-06, | |
| "loss": 0.0715, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 18.32, | |
| "learning_rate": 7.89909436052722e-06, | |
| "loss": 0.0634, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 18.34, | |
| "learning_rate": 7.8748968960385e-06, | |
| "loss": 0.0608, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 18.36, | |
| "learning_rate": 7.850712455906577e-06, | |
| "loss": 0.0862, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 18.38, | |
| "learning_rate": 7.82654118835333e-06, | |
| "loss": 0.1007, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 18.4, | |
| "learning_rate": 7.802383241519909e-06, | |
| "loss": 0.0555, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 18.43, | |
| "learning_rate": 7.778238763465817e-06, | |
| "loss": 0.0642, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 18.45, | |
| "learning_rate": 7.75410790216802e-06, | |
| "loss": 0.0728, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 18.47, | |
| "learning_rate": 7.729990805520018e-06, | |
| "loss": 0.0567, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 18.49, | |
| "learning_rate": 7.705887621330957e-06, | |
| "loss": 0.0638, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 18.51, | |
| "learning_rate": 7.681798497324717e-06, | |
| "loss": 0.0565, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 18.53, | |
| "learning_rate": 7.657723581139001e-06, | |
| "loss": 0.0584, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 18.55, | |
| "learning_rate": 7.633663020324439e-06, | |
| "loss": 0.0675, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 18.57, | |
| "learning_rate": 7.6096169623436754e-06, | |
| "loss": 0.0758, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 18.6, | |
| "learning_rate": 7.585585554570479e-06, | |
| "loss": 0.0709, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 18.62, | |
| "learning_rate": 7.5615689442888126e-06, | |
| "loss": 0.0594, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 18.64, | |
| "learning_rate": 7.5375672786919655e-06, | |
| "loss": 0.0667, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 18.66, | |
| "learning_rate": 7.513580704881622e-06, | |
| "loss": 0.065, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 18.68, | |
| "learning_rate": 7.489609369866984e-06, | |
| "loss": 0.0743, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 18.7, | |
| "learning_rate": 7.465653420563846e-06, | |
| "loss": 0.0692, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 18.72, | |
| "learning_rate": 7.441713003793709e-06, | |
| "loss": 0.0734, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 18.74, | |
| "learning_rate": 7.4177882662828835e-06, | |
| "loss": 0.0679, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 18.77, | |
| "learning_rate": 7.3938793546615775e-06, | |
| "loss": 0.0736, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 18.79, | |
| "learning_rate": 7.36998641546301e-06, | |
| "loss": 0.071, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 18.81, | |
| "learning_rate": 7.346109595122508e-06, | |
| "loss": 0.0605, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 18.83, | |
| "learning_rate": 7.3222490399766075e-06, | |
| "loss": 0.0784, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 18.85, | |
| "learning_rate": 7.298404896262159e-06, | |
| "loss": 0.0734, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 18.87, | |
| "learning_rate": 7.274577310115425e-06, | |
| "loss": 0.0686, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 18.89, | |
| "learning_rate": 7.2507664275712e-06, | |
| "loss": 0.0489, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 18.91, | |
| "learning_rate": 7.2269723945618954e-06, | |
| "loss": 0.0969, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 18.94, | |
| "learning_rate": 7.203195356916665e-06, | |
| "loss": 0.0679, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 18.96, | |
| "learning_rate": 7.179435460360491e-06, | |
| "loss": 0.0714, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 18.98, | |
| "learning_rate": 7.155692850513307e-06, | |
| "loss": 0.0665, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 19.0, | |
| "learning_rate": 7.131967672889101e-06, | |
| "loss": 0.0778, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 19.02, | |
| "learning_rate": 7.108260072895013e-06, | |
| "loss": 0.0616, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 19.04, | |
| "learning_rate": 7.084570195830467e-06, | |
| "loss": 0.0843, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 19.06, | |
| "learning_rate": 7.0608981868862535e-06, | |
| "loss": 0.0624, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 19.09, | |
| "learning_rate": 7.037244191143662e-06, | |
| "loss": 0.0563, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 19.11, | |
| "learning_rate": 7.013608353573578e-06, | |
| "loss": 0.0875, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 19.13, | |
| "learning_rate": 6.989990819035597e-06, | |
| "loss": 0.0671, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 19.15, | |
| "learning_rate": 6.966391732277143e-06, | |
| "loss": 0.0709, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 19.17, | |
| "learning_rate": 6.9428112379325755e-06, | |
| "loss": 0.0603, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 19.19, | |
| "learning_rate": 6.919249480522306e-06, | |
| "loss": 0.0672, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 19.21, | |
| "learning_rate": 6.895706604451905e-06, | |
| "loss": 0.0599, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 19.23, | |
| "learning_rate": 6.872182754011233e-06, | |
| "loss": 0.069, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 19.26, | |
| "learning_rate": 6.848678073373536e-06, | |
| "loss": 0.0624, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 19.28, | |
| "learning_rate": 6.8251927065945755e-06, | |
| "loss": 0.0893, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 19.3, | |
| "learning_rate": 6.801726797611746e-06, | |
| "loss": 0.0515, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 19.32, | |
| "learning_rate": 6.778280490243182e-06, | |
| "loss": 0.0736, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 19.34, | |
| "learning_rate": 6.754853928186889e-06, | |
| "loss": 0.0859, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 19.36, | |
| "learning_rate": 6.731447255019855e-06, | |
| "loss": 0.0676, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 19.38, | |
| "learning_rate": 6.70806061419717e-06, | |
| "loss": 0.0508, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 19.4, | |
| "learning_rate": 6.684694149051156e-06, | |
| "loss": 0.0863, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 19.43, | |
| "learning_rate": 6.6613480027904735e-06, | |
| "loss": 0.0926, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 19.45, | |
| "learning_rate": 6.638022318499262e-06, | |
| "loss": 0.0589, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 19.47, | |
| "learning_rate": 6.614717239136246e-06, | |
| "loss": 0.0643, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 19.49, | |
| "learning_rate": 6.59143290753387e-06, | |
| "loss": 0.0655, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 19.51, | |
| "learning_rate": 6.568169466397419e-06, | |
| "loss": 0.0733, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 19.53, | |
| "learning_rate": 6.544927058304139e-06, | |
| "loss": 0.0626, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 19.55, | |
| "learning_rate": 6.521705825702379e-06, | |
| "loss": 0.0642, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 19.57, | |
| "learning_rate": 6.498505910910697e-06, | |
| "loss": 0.0651, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 19.6, | |
| "learning_rate": 6.475327456117005e-06, | |
| "loss": 0.0685, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 19.62, | |
| "learning_rate": 6.452170603377689e-06, | |
| "loss": 0.0661, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 19.64, | |
| "learning_rate": 6.429035494616736e-06, | |
| "loss": 0.0829, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 19.66, | |
| "learning_rate": 6.405922271624874e-06, | |
| "loss": 0.0599, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 19.68, | |
| "learning_rate": 6.382831076058692e-06, | |
| "loss": 0.0703, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 19.7, | |
| "learning_rate": 6.359762049439785e-06, | |
| "loss": 0.0555, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 19.72, | |
| "learning_rate": 6.336715333153869e-06, | |
| "loss": 0.0668, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 19.74, | |
| "learning_rate": 6.313691068449931e-06, | |
| "loss": 0.0645, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 19.77, | |
| "learning_rate": 6.290689396439354e-06, | |
| "loss": 0.0608, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 19.79, | |
| "learning_rate": 6.267710458095053e-06, | |
| "loss": 0.0805, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 19.81, | |
| "learning_rate": 6.244754394250619e-06, | |
| "loss": 0.0724, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 19.83, | |
| "learning_rate": 6.221821345599437e-06, | |
| "loss": 0.0621, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 19.85, | |
| "learning_rate": 6.1989114526938535e-06, | |
| "loss": 0.0792, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 19.87, | |
| "learning_rate": 6.1760248559442815e-06, | |
| "loss": 0.0704, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 19.89, | |
| "learning_rate": 6.153161695618368e-06, | |
| "loss": 0.071, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 19.91, | |
| "learning_rate": 6.130322111840114e-06, | |
| "loss": 0.0621, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 19.94, | |
| "learning_rate": 6.10750624458903e-06, | |
| "loss": 0.0938, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 19.96, | |
| "learning_rate": 6.084714233699271e-06, | |
| "loss": 0.0898, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 19.98, | |
| "learning_rate": 6.06194621885878e-06, | |
| "loss": 0.0484, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "learning_rate": 6.039202339608432e-06, | |
| "loss": 0.0649, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 20.02, | |
| "learning_rate": 6.016482735341183e-06, | |
| "loss": 0.0596, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 20.04, | |
| "learning_rate": 5.993787545301204e-06, | |
| "loss": 0.075, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 20.06, | |
| "learning_rate": 5.97111690858305e-06, | |
| "loss": 0.0877, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 20.09, | |
| "learning_rate": 5.948470964130777e-06, | |
| "loss": 0.0595, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 20.11, | |
| "learning_rate": 5.92584985073712e-06, | |
| "loss": 0.0766, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 20.13, | |
| "learning_rate": 5.90325370704262e-06, | |
| "loss": 0.0638, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 20.15, | |
| "learning_rate": 5.880682671534792e-06, | |
| "loss": 0.0726, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 20.17, | |
| "learning_rate": 5.858136882547258e-06, | |
| "loss": 0.0634, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 20.19, | |
| "learning_rate": 5.835616478258915e-06, | |
| "loss": 0.0637, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 20.21, | |
| "learning_rate": 5.81312159669308e-06, | |
| "loss": 0.0709, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 20.23, | |
| "learning_rate": 5.790652375716653e-06, | |
| "loss": 0.0645, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 20.26, | |
| "learning_rate": 5.768208953039247e-06, | |
| "loss": 0.0722, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 20.28, | |
| "learning_rate": 5.745791466212383e-06, | |
| "loss": 0.0722, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 20.3, | |
| "learning_rate": 5.723400052628616e-06, | |
| "loss": 0.0595, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 20.32, | |
| "learning_rate": 5.701034849520699e-06, | |
| "loss": 0.064, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 20.34, | |
| "learning_rate": 5.678695993960752e-06, | |
| "loss": 0.0598, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 20.36, | |
| "learning_rate": 5.656383622859418e-06, | |
| "loss": 0.0652, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 20.38, | |
| "learning_rate": 5.634097872965021e-06, | |
| "loss": 0.0792, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 20.4, | |
| "learning_rate": 5.611838880862718e-06, | |
| "loss": 0.0612, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 20.43, | |
| "learning_rate": 5.589606782973683e-06, | |
| "loss": 0.0947, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 20.45, | |
| "learning_rate": 5.5674017155542634e-06, | |
| "loss": 0.0496, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 20.47, | |
| "learning_rate": 5.545223814695129e-06, | |
| "loss": 0.0592, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 20.49, | |
| "learning_rate": 5.523073216320461e-06, | |
| "loss": 0.0771, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 20.51, | |
| "learning_rate": 5.500950056187108e-06, | |
| "loss": 0.0621, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 20.53, | |
| "learning_rate": 5.478854469883755e-06, | |
| "loss": 0.0764, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 20.55, | |
| "learning_rate": 5.4567865928300835e-06, | |
| "loss": 0.06, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 20.57, | |
| "learning_rate": 5.434746560275961e-06, | |
| "loss": 0.0557, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 20.6, | |
| "learning_rate": 5.412734507300599e-06, | |
| "loss": 0.068, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 20.62, | |
| "learning_rate": 5.39075056881172e-06, | |
| "loss": 0.0614, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 20.64, | |
| "learning_rate": 5.368794879544747e-06, | |
| "loss": 0.0774, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 20.66, | |
| "learning_rate": 5.346867574061969e-06, | |
| "loss": 0.0705, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 20.68, | |
| "learning_rate": 5.3249687867517095e-06, | |
| "loss": 0.069, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 20.7, | |
| "learning_rate": 5.303098651827509e-06, | |
| "loss": 0.0672, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 20.72, | |
| "learning_rate": 5.281257303327309e-06, | |
| "loss": 0.086, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 20.74, | |
| "learning_rate": 5.259444875112625e-06, | |
| "loss": 0.0602, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 20.77, | |
| "learning_rate": 5.237661500867724e-06, | |
| "loss": 0.0647, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 20.79, | |
| "learning_rate": 5.215907314098796e-06, | |
| "loss": 0.058, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 20.81, | |
| "learning_rate": 5.194182448133163e-06, | |
| "loss": 0.0599, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 20.83, | |
| "learning_rate": 5.172487036118441e-06, | |
| "loss": 0.0678, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 20.85, | |
| "learning_rate": 5.150821211021716e-06, | |
| "loss": 0.0472, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 20.87, | |
| "learning_rate": 5.1291851056287555e-06, | |
| "loss": 0.0858, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 20.89, | |
| "learning_rate": 5.107578852543176e-06, | |
| "loss": 0.0642, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 20.91, | |
| "learning_rate": 5.086002584185638e-06, | |
| "loss": 0.0756, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 20.94, | |
| "learning_rate": 5.064456432793019e-06, | |
| "loss": 0.0561, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 20.96, | |
| "learning_rate": 5.04294053041763e-06, | |
| "loss": 0.0583, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 20.98, | |
| "learning_rate": 5.0214550089263885e-06, | |
| "loss": 0.0706, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 21.0, | |
| "learning_rate": 5.000000000000003e-06, | |
| "loss": 0.0679, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 21.02, | |
| "learning_rate": 4.978575635132185e-06, | |
| "loss": 0.0597, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 21.04, | |
| "learning_rate": 4.95718204562884e-06, | |
| "loss": 0.0592, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 21.06, | |
| "learning_rate": 4.9358193626072404e-06, | |
| "loss": 0.0647, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 21.09, | |
| "learning_rate": 4.914487716995252e-06, | |
| "loss": 0.0575, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 21.11, | |
| "learning_rate": 4.8931872395305145e-06, | |
| "loss": 0.0526, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 21.13, | |
| "learning_rate": 4.8719180607596486e-06, | |
| "loss": 0.0543, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 21.15, | |
| "learning_rate": 4.8506803110374366e-06, | |
| "loss": 0.065, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 21.17, | |
| "learning_rate": 4.8294741205260534e-06, | |
| "loss": 0.0694, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 21.19, | |
| "learning_rate": 4.808299619194251e-06, | |
| "loss": 0.0856, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 21.21, | |
| "learning_rate": 4.787156936816553e-06, | |
| "loss": 0.0927, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 21.23, | |
| "learning_rate": 4.766046202972483e-06, | |
| "loss": 0.0514, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 21.26, | |
| "learning_rate": 4.744967547045755e-06, | |
| "loss": 0.0647, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 21.28, | |
| "learning_rate": 4.723921098223484e-06, | |
| "loss": 0.0841, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 21.3, | |
| "learning_rate": 4.702906985495387e-06, | |
| "loss": 0.0586, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 21.32, | |
| "learning_rate": 4.681925337653006e-06, | |
| "loss": 0.0602, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 21.34, | |
| "learning_rate": 4.660976283288914e-06, | |
| "loss": 0.0756, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 21.36, | |
| "learning_rate": 4.640059950795914e-06, | |
| "loss": 0.0589, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 21.38, | |
| "learning_rate": 4.619176468366274e-06, | |
| "loss": 0.069, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 21.4, | |
| "learning_rate": 4.598325963990925e-06, | |
| "loss": 0.055, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 21.43, | |
| "learning_rate": 4.577508565458685e-06, | |
| "loss": 0.0676, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 21.45, | |
| "learning_rate": 4.5567244003554645e-06, | |
| "loss": 0.061, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 21.47, | |
| "learning_rate": 4.5359735960635e-06, | |
| "loss": 0.0605, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 21.49, | |
| "learning_rate": 4.51525627976057e-06, | |
| "loss": 0.0728, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 21.51, | |
| "learning_rate": 4.494572578419194e-06, | |
| "loss": 0.0684, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 21.53, | |
| "learning_rate": 4.47392261880589e-06, | |
| "loss": 0.0626, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 21.55, | |
| "learning_rate": 4.453306527480373e-06, | |
| "loss": 0.0811, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 21.57, | |
| "learning_rate": 4.432724430794786e-06, | |
| "loss": 0.0645, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 21.6, | |
| "learning_rate": 4.412176454892918e-06, | |
| "loss": 0.0702, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 21.62, | |
| "learning_rate": 4.391662725709449e-06, | |
| "loss": 0.0717, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 21.64, | |
| "learning_rate": 4.371183368969165e-06, | |
| "loss": 0.0618, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 21.66, | |
| "learning_rate": 4.350738510186182e-06, | |
| "loss": 0.0619, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 21.68, | |
| "learning_rate": 4.330328274663192e-06, | |
| "loss": 0.0767, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 21.7, | |
| "learning_rate": 4.309952787490689e-06, | |
| "loss": 0.0614, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 21.72, | |
| "learning_rate": 4.28961217354619e-06, | |
| "loss": 0.074, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 21.74, | |
| "learning_rate": 4.269306557493493e-06, | |
| "loss": 0.0598, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 21.77, | |
| "learning_rate": 4.2490360637818965e-06, | |
| "loss": 0.0588, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 21.79, | |
| "learning_rate": 4.22880081664544e-06, | |
| "loss": 0.0771, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 21.81, | |
| "learning_rate": 4.208600940102139e-06, | |
| "loss": 0.0713, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 21.83, | |
| "learning_rate": 4.188436557953235e-06, | |
| "loss": 0.0503, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 21.85, | |
| "learning_rate": 4.168307793782434e-06, | |
| "loss": 0.0845, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 21.87, | |
| "learning_rate": 4.148214770955136e-06, | |
| "loss": 0.0556, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 21.89, | |
| "learning_rate": 4.128157612617696e-06, | |
| "loss": 0.0749, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 21.91, | |
| "learning_rate": 4.108136441696662e-06, | |
| "loss": 0.0436, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 21.94, | |
| "learning_rate": 4.088151380898023e-06, | |
| "loss": 0.0544, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 21.96, | |
| "learning_rate": 4.0682025527064486e-06, | |
| "loss": 0.06, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 21.98, | |
| "learning_rate": 4.048290079384554e-06, | |
| "loss": 0.0796, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 22.0, | |
| "learning_rate": 4.028414082972141e-06, | |
| "loss": 0.0697, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 22.02, | |
| "learning_rate": 4.008574685285442e-06, | |
| "loss": 0.0601, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 22.04, | |
| "learning_rate": 3.988772007916396e-06, | |
| "loss": 0.0667, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 22.06, | |
| "learning_rate": 3.969006172231883e-06, | |
| "loss": 0.0608, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 22.09, | |
| "learning_rate": 3.94927729937299e-06, | |
| "loss": 0.076, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 22.11, | |
| "learning_rate": 3.9295855102542604e-06, | |
| "loss": 0.0738, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 22.13, | |
| "learning_rate": 3.909930925562962e-06, | |
| "loss": 0.0562, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 22.15, | |
| "learning_rate": 3.890313665758348e-06, | |
| "loss": 0.0634, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 22.17, | |
| "learning_rate": 3.870733851070904e-06, | |
| "loss": 0.0686, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 22.19, | |
| "learning_rate": 3.851191601501632e-06, | |
| "loss": 0.053, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 22.21, | |
| "learning_rate": 3.8316870368213e-06, | |
| "loss": 0.0694, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 22.23, | |
| "learning_rate": 3.8122202765697135e-06, | |
| "loss": 0.075, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 22.26, | |
| "learning_rate": 3.7927914400549748e-06, | |
| "loss": 0.0659, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 22.28, | |
| "learning_rate": 3.7734006463527695e-06, | |
| "loss": 0.0642, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 22.3, | |
| "learning_rate": 3.7540480143056233e-06, | |
| "loss": 0.0845, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 22.32, | |
| "learning_rate": 3.7347336625221686e-06, | |
| "loss": 0.0806, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 22.34, | |
| "learning_rate": 3.7154577093764334e-06, | |
| "loss": 0.0579, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 22.36, | |
| "learning_rate": 3.6962202730071075e-06, | |
| "loss": 0.0583, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 22.38, | |
| "learning_rate": 3.6770214713168117e-06, | |
| "loss": 0.0642, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 22.4, | |
| "learning_rate": 3.6578614219713883e-06, | |
| "loss": 0.0583, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 22.43, | |
| "learning_rate": 3.6387402423991724e-06, | |
| "loss": 0.0679, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 22.45, | |
| "learning_rate": 3.619658049790279e-06, | |
| "loss": 0.0612, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 22.47, | |
| "learning_rate": 3.600614961095863e-06, | |
| "loss": 0.0612, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 22.49, | |
| "learning_rate": 3.5816110930274363e-06, | |
| "loss": 0.0658, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 22.51, | |
| "learning_rate": 3.56264656205613e-06, | |
| "loss": 0.061, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 22.53, | |
| "learning_rate": 3.543721484411976e-06, | |
| "loss": 0.0466, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 22.55, | |
| "learning_rate": 3.5248359760832174e-06, | |
| "loss": 0.0715, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 22.57, | |
| "learning_rate": 3.505990152815577e-06, | |
| "loss": 0.0514, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 22.6, | |
| "learning_rate": 3.4871841301115615e-06, | |
| "loss": 0.0724, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 22.62, | |
| "learning_rate": 3.468418023229737e-06, | |
| "loss": 0.0624, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 22.64, | |
| "learning_rate": 3.4496919471840407e-06, | |
| "loss": 0.0688, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 22.66, | |
| "learning_rate": 3.4310060167430724e-06, | |
| "loss": 0.0717, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 22.68, | |
| "learning_rate": 3.412360346429373e-06, | |
| "loss": 0.0762, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 22.7, | |
| "learning_rate": 3.393755050518749e-06, | |
| "loss": 0.0804, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 22.72, | |
| "learning_rate": 3.3751902430395558e-06, | |
| "loss": 0.0492, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 22.74, | |
| "learning_rate": 3.3566660377720095e-06, | |
| "loss": 0.0672, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 22.77, | |
| "learning_rate": 3.338182548247464e-06, | |
| "loss": 0.052, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 22.79, | |
| "learning_rate": 3.3197398877477528e-06, | |
| "loss": 0.0849, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 22.81, | |
| "learning_rate": 3.3013381693044676e-06, | |
| "loss": 0.0707, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 22.83, | |
| "learning_rate": 3.2829775056982827e-06, | |
| "loss": 0.055, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 22.85, | |
| "learning_rate": 3.264658009458239e-06, | |
| "loss": 0.0846, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 22.87, | |
| "learning_rate": 3.246379792861084e-06, | |
| "loss": 0.0494, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 22.89, | |
| "learning_rate": 3.2281429679305675e-06, | |
| "loss": 0.0624, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 22.91, | |
| "learning_rate": 3.209947646436752e-06, | |
| "loss": 0.0673, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 22.94, | |
| "learning_rate": 3.1917939398953378e-06, | |
| "loss": 0.0743, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 22.96, | |
| "learning_rate": 3.1736819595669764e-06, | |
| "loss": 0.0845, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 22.98, | |
| "learning_rate": 3.1556118164565863e-06, | |
| "loss": 0.0539, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 23.0, | |
| "learning_rate": 3.1375836213126653e-06, | |
| "loss": 0.0593, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 23.02, | |
| "learning_rate": 3.1195974846266332e-06, | |
| "loss": 0.0602, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 23.04, | |
| "learning_rate": 3.101653516632136e-06, | |
| "loss": 0.0556, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 23.06, | |
| "learning_rate": 3.083751827304369e-06, | |
| "loss": 0.0849, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 23.09, | |
| "learning_rate": 3.0658925263594185e-06, | |
| "loss": 0.0655, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 23.11, | |
| "learning_rate": 3.0480757232535773e-06, | |
| "loss": 0.0833, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 23.13, | |
| "learning_rate": 3.0303015271826806e-06, | |
| "loss": 0.0717, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 23.15, | |
| "learning_rate": 3.0125700470814236e-06, | |
| "loss": 0.0714, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 23.17, | |
| "learning_rate": 2.9948813916227114e-06, | |
| "loss": 0.075, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 23.19, | |
| "learning_rate": 2.9772356692169867e-06, | |
| "loss": 0.0648, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 23.21, | |
| "learning_rate": 2.9596329880115538e-06, | |
| "loss": 0.0622, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 23.23, | |
| "learning_rate": 2.9420734558899323e-06, | |
| "loss": 0.071, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 23.26, | |
| "learning_rate": 2.9245571804711934e-06, | |
| "loss": 0.0623, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 23.28, | |
| "learning_rate": 2.9070842691092847e-06, | |
| "loss": 0.0717, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 23.3, | |
| "learning_rate": 2.889654828892393e-06, | |
| "loss": 0.0528, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 23.32, | |
| "learning_rate": 2.8722689666422753e-06, | |
| "loss": 0.0684, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 23.34, | |
| "learning_rate": 2.854926788913611e-06, | |
| "loss": 0.0719, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 23.36, | |
| "learning_rate": 2.8376284019933377e-06, | |
| "loss": 0.0659, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 23.38, | |
| "learning_rate": 2.820373911900012e-06, | |
| "loss": 0.0546, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 23.4, | |
| "learning_rate": 2.8031634243831583e-06, | |
| "loss": 0.0797, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 23.43, | |
| "learning_rate": 2.7859970449226103e-06, | |
| "loss": 0.0616, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 23.45, | |
| "learning_rate": 2.768874878727876e-06, | |
| "loss": 0.0605, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 23.47, | |
| "learning_rate": 2.751797030737491e-06, | |
| "loss": 0.0663, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 23.49, | |
| "learning_rate": 2.73476360561837e-06, | |
| "loss": 0.0615, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 23.51, | |
| "learning_rate": 2.717774707765165e-06, | |
| "loss": 0.0591, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 23.53, | |
| "learning_rate": 2.7008304412996335e-06, | |
| "loss": 0.0556, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 23.55, | |
| "learning_rate": 2.6839309100699975e-06, | |
| "loss": 0.0445, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 23.57, | |
| "learning_rate": 2.6670762176502963e-06, | |
| "loss": 0.0683, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 23.6, | |
| "learning_rate": 2.6502664673397693e-06, | |
| "loss": 0.0687, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 23.62, | |
| "learning_rate": 2.6335017621622116e-06, | |
| "loss": 0.0682, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 23.64, | |
| "learning_rate": 2.616782204865347e-06, | |
| "loss": 0.0574, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 23.66, | |
| "learning_rate": 2.600107897920188e-06, | |
| "loss": 0.0701, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 23.68, | |
| "learning_rate": 2.5834789435204245e-06, | |
| "loss": 0.059, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 23.7, | |
| "learning_rate": 2.566895443581792e-06, | |
| "loss": 0.068, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 23.72, | |
| "learning_rate": 2.5503574997414315e-06, | |
| "loss": 0.0624, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 23.74, | |
| "learning_rate": 2.5338652133572915e-06, | |
| "loss": 0.0676, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 23.77, | |
| "learning_rate": 2.517418685507489e-06, | |
| "loss": 0.0771, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 23.79, | |
| "learning_rate": 2.501018016989699e-06, | |
| "loss": 0.0768, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 23.81, | |
| "learning_rate": 2.484663308320526e-06, | |
| "loss": 0.0553, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 23.83, | |
| "learning_rate": 2.468354659734903e-06, | |
| "loss": 0.0606, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 23.85, | |
| "learning_rate": 2.4520921711854683e-06, | |
| "loss": 0.0614, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 23.87, | |
| "learning_rate": 2.4358759423419476e-06, | |
| "loss": 0.084, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 23.89, | |
| "learning_rate": 2.4197060725905563e-06, | |
| "loss": 0.0704, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 23.91, | |
| "learning_rate": 2.4035826610333844e-06, | |
| "loss": 0.0542, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 23.94, | |
| "learning_rate": 2.387505806487781e-06, | |
| "loss": 0.0616, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 23.96, | |
| "learning_rate": 2.3714756074857613e-06, | |
| "loss": 0.0711, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 23.98, | |
| "learning_rate": 2.3554921622733984e-06, | |
| "loss": 0.0622, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 24.0, | |
| "learning_rate": 2.339555568810221e-06, | |
| "loss": 0.057, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 24.02, | |
| "learning_rate": 2.3236659247686044e-06, | |
| "loss": 0.0649, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 24.04, | |
| "learning_rate": 2.3078233275331862e-06, | |
| "loss": 0.0784, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 24.06, | |
| "learning_rate": 2.2920278742002677e-06, | |
| "loss": 0.0595, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 24.09, | |
| "learning_rate": 2.2762796615772e-06, | |
| "loss": 0.0801, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 24.11, | |
| "learning_rate": 2.2605787861818172e-06, | |
| "loss": 0.0678, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 24.13, | |
| "learning_rate": 2.2449253442418282e-06, | |
| "loss": 0.0629, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 24.15, | |
| "learning_rate": 2.2293194316942344e-06, | |
| "loss": 0.075, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 24.17, | |
| "learning_rate": 2.2137611441847294e-06, | |
| "loss": 0.061, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 24.19, | |
| "learning_rate": 2.1982505770671303e-06, | |
| "loss": 0.06, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 24.21, | |
| "learning_rate": 2.182787825402787e-06, | |
| "loss": 0.0642, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 24.23, | |
| "learning_rate": 2.1673729839599854e-06, | |
| "loss": 0.0581, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 24.26, | |
| "learning_rate": 2.1520061472133903e-06, | |
| "loss": 0.0562, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 24.28, | |
| "learning_rate": 2.1366874093434496e-06, | |
| "loss": 0.0691, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 24.3, | |
| "learning_rate": 2.121416864235828e-06, | |
| "loss": 0.0534, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 24.32, | |
| "learning_rate": 2.1061946054808145e-06, | |
| "loss": 0.0586, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 24.34, | |
| "learning_rate": 2.09102072637277e-06, | |
| "loss": 0.0691, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 24.36, | |
| "learning_rate": 2.0758953199095456e-06, | |
| "loss": 0.0764, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 24.38, | |
| "learning_rate": 2.0608184787919027e-06, | |
| "loss": 0.0641, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 24.4, | |
| "learning_rate": 2.045790295422966e-06, | |
| "loss": 0.0728, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 24.43, | |
| "learning_rate": 2.030810861907643e-06, | |
| "loss": 0.0619, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 24.45, | |
| "learning_rate": 2.0158802700520576e-06, | |
| "loss": 0.0581, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 24.47, | |
| "learning_rate": 2.0009986113629974e-06, | |
| "loss": 0.0545, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 24.49, | |
| "learning_rate": 1.9861659770473473e-06, | |
| "loss": 0.0611, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 24.51, | |
| "learning_rate": 1.9713824580115336e-06, | |
| "loss": 0.0564, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 24.53, | |
| "learning_rate": 1.956648144860954e-06, | |
| "loss": 0.0757, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 24.55, | |
| "learning_rate": 1.9419631278994422e-06, | |
| "loss": 0.0718, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 24.57, | |
| "learning_rate": 1.927327497128706e-06, | |
| "loss": 0.0674, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 24.6, | |
| "learning_rate": 1.9127413422477658e-06, | |
| "loss": 0.06, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 24.62, | |
| "learning_rate": 1.8982047526524195e-06, | |
| "loss": 0.0804, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 24.64, | |
| "learning_rate": 1.8837178174346882e-06, | |
| "loss": 0.0795, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 24.66, | |
| "learning_rate": 1.8692806253822727e-06, | |
| "loss": 0.0765, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 24.68, | |
| "learning_rate": 1.8548932649780005e-06, | |
| "loss": 0.0526, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 24.7, | |
| "learning_rate": 1.840555824399296e-06, | |
| "loss": 0.0703, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 24.72, | |
| "learning_rate": 1.826268391517637e-06, | |
| "loss": 0.0731, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 24.74, | |
| "learning_rate": 1.8120310538980024e-06, | |
| "loss": 0.0658, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 24.77, | |
| "learning_rate": 1.7978438987983582e-06, | |
| "loss": 0.0649, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 24.79, | |
| "learning_rate": 1.7837070131691058e-06, | |
| "loss": 0.0785, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 24.81, | |
| "learning_rate": 1.76962048365256e-06, | |
| "loss": 0.0598, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 24.83, | |
| "learning_rate": 1.7555843965823992e-06, | |
| "loss": 0.0696, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 24.85, | |
| "learning_rate": 1.7415988379831616e-06, | |
| "loss": 0.055, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 24.87, | |
| "learning_rate": 1.7276638935697044e-06, | |
| "loss": 0.0744, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 24.89, | |
| "learning_rate": 1.7137796487466795e-06, | |
| "loss": 0.0548, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 24.91, | |
| "learning_rate": 1.6999461886080048e-06, | |
| "loss": 0.0598, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 24.94, | |
| "learning_rate": 1.6861635979363545e-06, | |
| "loss": 0.0691, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 24.96, | |
| "learning_rate": 1.6724319612026351e-06, | |
| "loss": 0.0533, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 24.98, | |
| "learning_rate": 1.6587513625654572e-06, | |
| "loss": 0.0614, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 25.0, | |
| "learning_rate": 1.6451218858706374e-06, | |
| "loss": 0.0638, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 25.02, | |
| "learning_rate": 1.6315436146506702e-06, | |
| "loss": 0.0632, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 25.04, | |
| "learning_rate": 1.6180166321242275e-06, | |
| "loss": 0.061, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 25.06, | |
| "learning_rate": 1.6045410211956325e-06, | |
| "loss": 0.0632, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 25.09, | |
| "learning_rate": 1.5911168644543706e-06, | |
| "loss": 0.0691, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 25.11, | |
| "learning_rate": 1.5777442441745715e-06, | |
| "loss": 0.058, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 25.13, | |
| "learning_rate": 1.5644232423145044e-06, | |
| "loss": 0.084, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 25.15, | |
| "learning_rate": 1.5511539405160824e-06, | |
| "loss": 0.0648, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 25.17, | |
| "learning_rate": 1.537936420104359e-06, | |
| "loss": 0.0693, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 25.19, | |
| "learning_rate": 1.52477076208703e-06, | |
| "loss": 0.0702, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 25.21, | |
| "learning_rate": 1.5116570471539294e-06, | |
| "loss": 0.057, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 25.23, | |
| "learning_rate": 1.4985953556765486e-06, | |
| "loss": 0.046, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 25.26, | |
| "learning_rate": 1.485585767707539e-06, | |
| "loss": 0.059, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 25.28, | |
| "learning_rate": 1.4726283629802107e-06, | |
| "loss": 0.055, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 25.3, | |
| "learning_rate": 1.4597232209080603e-06, | |
| "loss": 0.0693, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 25.32, | |
| "learning_rate": 1.446870420584272e-06, | |
| "loss": 0.0547, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 25.34, | |
| "learning_rate": 1.4340700407812436e-06, | |
| "loss": 0.049, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 25.36, | |
| "learning_rate": 1.421322159950087e-06, | |
| "loss": 0.0532, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 25.38, | |
| "learning_rate": 1.4086268562201654e-06, | |
| "loss": 0.0647, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 25.4, | |
| "learning_rate": 1.3959842073986085e-06, | |
| "loss": 0.072, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 25.43, | |
| "learning_rate": 1.383394290969824e-06, | |
| "loss": 0.0512, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 25.45, | |
| "learning_rate": 1.3708571840950434e-06, | |
| "loss": 0.0536, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 25.47, | |
| "learning_rate": 1.3583729636118359e-06, | |
| "loss": 0.0766, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 25.49, | |
| "learning_rate": 1.3459417060336344e-06, | |
| "loss": 0.0878, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 25.51, | |
| "learning_rate": 1.3335634875492765e-06, | |
| "loss": 0.0578, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 25.53, | |
| "learning_rate": 1.3212383840225328e-06, | |
| "loss": 0.0721, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 25.55, | |
| "learning_rate": 1.308966470991646e-06, | |
| "loss": 0.0513, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 25.57, | |
| "learning_rate": 1.2967478236688546e-06, | |
| "loss": 0.0798, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 25.6, | |
| "learning_rate": 1.2845825169399506e-06, | |
| "loss": 0.0526, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 25.62, | |
| "learning_rate": 1.2724706253638108e-06, | |
| "loss": 0.0564, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 25.64, | |
| "learning_rate": 1.2604122231719318e-06, | |
| "loss": 0.051, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 25.66, | |
| "learning_rate": 1.2484073842679945e-06, | |
| "loss": 0.0586, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 25.68, | |
| "learning_rate": 1.2364561822273958e-06, | |
| "loss": 0.0798, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 25.7, | |
| "learning_rate": 1.2245586902968033e-06, | |
| "loss": 0.0673, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 25.72, | |
| "learning_rate": 1.2127149813937023e-06, | |
| "loss": 0.0759, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 25.74, | |
| "learning_rate": 1.2009251281059576e-06, | |
| "loss": 0.0716, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 25.77, | |
| "learning_rate": 1.18918920269136e-06, | |
| "loss": 0.0631, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 25.79, | |
| "learning_rate": 1.1775072770771833e-06, | |
| "loss": 0.0561, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 25.81, | |
| "learning_rate": 1.1658794228597524e-06, | |
| "loss": 0.0826, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 25.83, | |
| "learning_rate": 1.1543057113039956e-06, | |
| "loss": 0.0517, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 25.85, | |
| "learning_rate": 1.1427862133430157e-06, | |
| "loss": 0.0807, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 25.87, | |
| "learning_rate": 1.1313209995776408e-06, | |
| "loss": 0.0876, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 25.89, | |
| "learning_rate": 1.1199101402760116e-06, | |
| "loss": 0.0672, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 25.91, | |
| "learning_rate": 1.1085537053731355e-06, | |
| "loss": 0.0579, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 25.94, | |
| "learning_rate": 1.0972517644704616e-06, | |
| "loss": 0.0727, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 25.96, | |
| "learning_rate": 1.0860043868354585e-06, | |
| "loss": 0.0569, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 25.98, | |
| "learning_rate": 1.074811641401189e-06, | |
| "loss": 0.0777, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 26.0, | |
| "learning_rate": 1.0636735967658785e-06, | |
| "loss": 0.0733, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 26.02, | |
| "learning_rate": 1.0525903211925071e-06, | |
| "loss": 0.0607, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 26.04, | |
| "learning_rate": 1.0415618826083828e-06, | |
| "loss": 0.0583, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 26.06, | |
| "learning_rate": 1.030588348604733e-06, | |
| "loss": 0.0672, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 26.09, | |
| "learning_rate": 1.019669786436277e-06, | |
| "loss": 0.0656, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 26.11, | |
| "learning_rate": 1.0088062630208272e-06, | |
| "loss": 0.0636, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 26.13, | |
| "learning_rate": 9.979978449388772e-07, | |
| "loss": 0.0865, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 26.15, | |
| "learning_rate": 9.872445984331814e-07, | |
| "loss": 0.0569, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 26.17, | |
| "learning_rate": 9.765465894083637e-07, | |
| "loss": 0.0566, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 26.19, | |
| "learning_rate": 9.65903883430509e-07, | |
| "loss": 0.0773, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 26.21, | |
| "learning_rate": 9.5531654572676e-07, | |
| "loss": 0.0628, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 26.23, | |
| "learning_rate": 9.447846411849115e-07, | |
| "loss": 0.0626, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 26.26, | |
| "learning_rate": 9.343082343530252e-07, | |
| "loss": 0.0611, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 26.28, | |
| "learning_rate": 9.238873894390288e-07, | |
| "loss": 0.0643, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 26.3, | |
| "learning_rate": 9.135221703103137e-07, | |
| "loss": 0.0606, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 26.32, | |
| "learning_rate": 9.032126404933594e-07, | |
| "loss": 0.0798, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 26.34, | |
| "learning_rate": 8.929588631733333e-07, | |
| "loss": 0.0921, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 26.36, | |
| "learning_rate": 8.827609011937066e-07, | |
| "loss": 0.0749, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 26.38, | |
| "learning_rate": 8.726188170558647e-07, | |
| "loss": 0.0638, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 26.4, | |
| "learning_rate": 8.625326729187334e-07, | |
| "loss": 0.0599, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 26.43, | |
| "learning_rate": 8.525025305983936e-07, | |
| "loss": 0.0589, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 26.45, | |
| "learning_rate": 8.425284515676946e-07, | |
| "loss": 0.064, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 26.47, | |
| "learning_rate": 8.326104969558912e-07, | |
| "loss": 0.0542, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 26.49, | |
| "learning_rate": 8.227487275482592e-07, | |
| "loss": 0.0611, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 26.51, | |
| "learning_rate": 8.129432037857287e-07, | |
| "loss": 0.0571, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 26.53, | |
| "learning_rate": 8.031939857645055e-07, | |
| "loss": 0.0719, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 26.55, | |
| "learning_rate": 7.935011332357113e-07, | |
| "loss": 0.0595, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 26.57, | |
| "learning_rate": 7.838647056050186e-07, | |
| "loss": 0.0701, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 26.6, | |
| "learning_rate": 7.742847619322724e-07, | |
| "loss": 0.0525, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 26.62, | |
| "learning_rate": 7.647613609311455e-07, | |
| "loss": 0.0662, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 26.64, | |
| "learning_rate": 7.552945609687723e-07, | |
| "loss": 0.0765, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 26.66, | |
| "learning_rate": 7.458844200653825e-07, | |
| "loss": 0.0659, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 26.68, | |
| "learning_rate": 7.365309958939615e-07, | |
| "loss": 0.0904, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 26.7, | |
| "learning_rate": 7.272343457798836e-07, | |
| "loss": 0.0642, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 26.72, | |
| "learning_rate": 7.179945267005683e-07, | |
| "loss": 0.0601, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 26.74, | |
| "learning_rate": 7.088115952851238e-07, | |
| "loss": 0.0561, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 26.77, | |
| "learning_rate": 6.996856078140113e-07, | |
| "loss": 0.0625, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 26.79, | |
| "learning_rate": 6.906166202186882e-07, | |
| "loss": 0.0532, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 26.81, | |
| "learning_rate": 6.81604688081271e-07, | |
| "loss": 0.07, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 26.83, | |
| "learning_rate": 6.726498666341963e-07, | |
| "loss": 0.08, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 26.85, | |
| "learning_rate": 6.637522107598782e-07, | |
| "loss": 0.0582, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 26.87, | |
| "learning_rate": 6.549117749903755e-07, | |
| "loss": 0.0581, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 26.89, | |
| "learning_rate": 6.461286135070533e-07, | |
| "loss": 0.0645, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 26.91, | |
| "learning_rate": 6.374027801402527e-07, | |
| "loss": 0.0554, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 26.94, | |
| "learning_rate": 6.287343283689662e-07, | |
| "loss": 0.0586, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 26.96, | |
| "learning_rate": 6.201233113205019e-07, | |
| "loss": 0.0742, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 26.98, | |
| "learning_rate": 6.115697817701616e-07, | |
| "loss": 0.0601, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 27.0, | |
| "learning_rate": 6.030737921409169e-07, | |
| "loss": 0.0437, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 27.02, | |
| "learning_rate": 5.946353945030903e-07, | |
| "loss": 0.0656, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 27.04, | |
| "learning_rate": 5.862546405740299e-07, | |
| "loss": 0.0607, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 27.06, | |
| "learning_rate": 5.779315817178e-07, | |
| "loss": 0.05, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 27.09, | |
| "learning_rate": 5.696662689448607e-07, | |
| "loss": 0.062, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 27.11, | |
| "learning_rate": 5.614587529117588e-07, | |
| "loss": 0.0571, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 27.13, | |
| "learning_rate": 5.533090839208133e-07, | |
| "loss": 0.0691, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 27.15, | |
| "learning_rate": 5.452173119198101e-07, | |
| "loss": 0.0663, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 27.17, | |
| "learning_rate": 5.371834865017001e-07, | |
| "loss": 0.0805, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 27.19, | |
| "learning_rate": 5.292076569042825e-07, | |
| "loss": 0.0667, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 27.21, | |
| "learning_rate": 5.212898720099168e-07, | |
| "loss": 0.0639, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 27.23, | |
| "learning_rate": 5.134301803452157e-07, | |
| "loss": 0.0666, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 27.26, | |
| "learning_rate": 5.056286300807511e-07, | |
| "loss": 0.0652, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 27.28, | |
| "learning_rate": 4.978852690307523e-07, | |
| "loss": 0.0684, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 27.3, | |
| "learning_rate": 4.902001446528237e-07, | |
| "loss": 0.0508, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 27.32, | |
| "learning_rate": 4.825733040476465e-07, | |
| "loss": 0.0632, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 27.34, | |
| "learning_rate": 4.750047939586877e-07, | |
| "loss": 0.0625, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 27.36, | |
| "learning_rate": 4.6749466077192217e-07, | |
| "loss": 0.0732, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 27.38, | |
| "learning_rate": 4.6004295051554236e-07, | |
| "loss": 0.0464, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 27.4, | |
| "learning_rate": 4.5264970885967816e-07, | |
| "loss": 0.0701, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 27.43, | |
| "learning_rate": 4.4531498111611284e-07, | |
| "loss": 0.0728, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 27.45, | |
| "learning_rate": 4.3803881223801414e-07, | |
| "loss": 0.0685, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 27.47, | |
| "learning_rate": 4.3082124681965156e-07, | |
| "loss": 0.0821, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 27.49, | |
| "learning_rate": 4.236623290961217e-07, | |
| "loss": 0.0634, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 27.51, | |
| "learning_rate": 4.165621029430855e-07, | |
| "loss": 0.0625, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 27.53, | |
| "learning_rate": 4.0952061187649383e-07, | |
| "loss": 0.0576, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 27.55, | |
| "learning_rate": 4.0253789905231546e-07, | |
| "loss": 0.0541, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 27.57, | |
| "learning_rate": 3.956140072662851e-07, | |
| "loss": 0.0548, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 27.6, | |
| "learning_rate": 3.887489789536314e-07, | |
| "loss": 0.0782, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 27.62, | |
| "learning_rate": 3.8194285618882167e-07, | |
| "loss": 0.0742, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 27.64, | |
| "learning_rate": 3.7519568068529854e-07, | |
| "loss": 0.0663, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 27.66, | |
| "learning_rate": 3.6850749379523044e-07, | |
| "loss": 0.0805, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 27.68, | |
| "learning_rate": 3.618783365092582e-07, | |
| "loss": 0.0537, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 27.7, | |
| "learning_rate": 3.553082494562354e-07, | |
| "loss": 0.0598, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 27.72, | |
| "learning_rate": 3.487972729029887e-07, | |
| "loss": 0.0652, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 27.74, | |
| "learning_rate": 3.423454467540699e-07, | |
| "loss": 0.0679, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 27.77, | |
| "learning_rate": 3.359528105515064e-07, | |
| "loss": 0.0632, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 27.79, | |
| "learning_rate": 3.2961940347456033e-07, | |
| "loss": 0.0538, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 27.81, | |
| "learning_rate": 3.2334526433949077e-07, | |
| "loss": 0.0559, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 27.83, | |
| "learning_rate": 3.171304315993173e-07, | |
| "loss": 0.074, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 27.85, | |
| "learning_rate": 3.1097494334357824e-07, | |
| "loss": 0.0612, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 27.87, | |
| "learning_rate": 3.048788372981015e-07, | |
| "loss": 0.0753, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 27.89, | |
| "learning_rate": 2.988421508247741e-07, | |
| "loss": 0.0505, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 27.91, | |
| "learning_rate": 2.928649209213119e-07, | |
| "loss": 0.061, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 27.94, | |
| "learning_rate": 2.869471842210292e-07, | |
| "loss": 0.0764, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 27.96, | |
| "learning_rate": 2.8108897699262174e-07, | |
| "loss": 0.0808, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 27.98, | |
| "learning_rate": 2.752903351399383e-07, | |
| "loss": 0.0526, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 28.0, | |
| "learning_rate": 2.6955129420176193e-07, | |
| "loss": 0.075, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 28.02, | |
| "learning_rate": 2.638718893515946e-07, | |
| "loss": 0.0724, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 28.04, | |
| "learning_rate": 2.582521553974404e-07, | |
| "loss": 0.0698, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 28.06, | |
| "learning_rate": 2.526921267815896e-07, | |
| "loss": 0.0682, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 28.09, | |
| "learning_rate": 2.4719183758041056e-07, | |
| "loss": 0.0759, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 28.11, | |
| "learning_rate": 2.4175132150413895e-07, | |
| "loss": 0.0489, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 28.13, | |
| "learning_rate": 2.3637061189667687e-07, | |
| "loss": 0.0606, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 28.15, | |
| "learning_rate": 2.3104974173537742e-07, | |
| "loss": 0.0659, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 28.17, | |
| "learning_rate": 2.2578874363085368e-07, | |
| "loss": 0.0586, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 28.19, | |
| "learning_rate": 2.2058764982677117e-07, | |
| "loss": 0.0588, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 28.21, | |
| "learning_rate": 2.1544649219965574e-07, | |
| "loss": 0.0548, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 28.23, | |
| "learning_rate": 2.1036530225869268e-07, | |
| "loss": 0.0704, | |
| "step": 1327 | |
| }, | |
| { | |
| "epoch": 28.26, | |
| "learning_rate": 2.05344111145539e-07, | |
| "loss": 0.0712, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 28.28, | |
| "learning_rate": 2.0038294963413251e-07, | |
| "loss": 0.0581, | |
| "step": 1329 | |
| }, | |
| { | |
| "epoch": 28.3, | |
| "learning_rate": 1.954818481304943e-07, | |
| "loss": 0.0693, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 28.32, | |
| "learning_rate": 1.9064083667255317e-07, | |
| "loss": 0.0792, | |
| "step": 1331 | |
| }, | |
| { | |
| "epoch": 28.34, | |
| "learning_rate": 1.8585994492995917e-07, | |
| "loss": 0.0605, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 28.36, | |
| "learning_rate": 1.8113920220389492e-07, | |
| "loss": 0.0661, | |
| "step": 1333 | |
| }, | |
| { | |
| "epoch": 28.38, | |
| "learning_rate": 1.7647863742690562e-07, | |
| "loss": 0.0458, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 28.4, | |
| "learning_rate": 1.7187827916271382e-07, | |
| "loss": 0.0487, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 28.43, | |
| "learning_rate": 1.673381556060516e-07, | |
| "loss": 0.056, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 28.45, | |
| "learning_rate": 1.628582945824786e-07, | |
| "loss": 0.0647, | |
| "step": 1337 | |
| }, | |
| { | |
| "epoch": 28.47, | |
| "learning_rate": 1.5843872354822099e-07, | |
| "loss": 0.0552, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 28.49, | |
| "learning_rate": 1.540794695899972e-07, | |
| "loss": 0.0846, | |
| "step": 1339 | |
| }, | |
| { | |
| "epoch": 28.51, | |
| "learning_rate": 1.497805594248536e-07, | |
| "loss": 0.0742, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 28.53, | |
| "learning_rate": 1.4554201940000123e-07, | |
| "loss": 0.0701, | |
| "step": 1341 | |
| }, | |
| { | |
| "epoch": 28.55, | |
| "learning_rate": 1.4136387549265383e-07, | |
| "loss": 0.0666, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 28.57, | |
| "learning_rate": 1.3724615330987013e-07, | |
| "loss": 0.0603, | |
| "step": 1343 | |
| }, | |
| { | |
| "epoch": 28.6, | |
| "learning_rate": 1.3318887808839276e-07, | |
| "loss": 0.0625, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 28.62, | |
| "learning_rate": 1.2919207469449747e-07, | |
| "loss": 0.0639, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 28.64, | |
| "learning_rate": 1.2525576762383973e-07, | |
| "loss": 0.0651, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 28.66, | |
| "learning_rate": 1.21379981001305e-07, | |
| "loss": 0.0586, | |
| "step": 1347 | |
| }, | |
| { | |
| "epoch": 28.68, | |
| "learning_rate": 1.1756473858085982e-07, | |
| "loss": 0.057, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 28.7, | |
| "learning_rate": 1.1381006374540538e-07, | |
| "loss": 0.0623, | |
| "step": 1349 | |
| }, | |
| { | |
| "epoch": 28.72, | |
| "learning_rate": 1.1011597950663866e-07, | |
| "loss": 0.062, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 28.74, | |
| "learning_rate": 1.0648250850490483e-07, | |
| "loss": 0.0565, | |
| "step": 1351 | |
| }, | |
| { | |
| "epoch": 28.77, | |
| "learning_rate": 1.0290967300906507e-07, | |
| "loss": 0.073, | |
| "step": 1352 | |
| }, | |
| { | |
| "epoch": 28.79, | |
| "learning_rate": 9.939749491635343e-08, | |
| "loss": 0.0613, | |
| "step": 1353 | |
| }, | |
| { | |
| "epoch": 28.81, | |
| "learning_rate": 9.594599575225017e-08, | |
| "loss": 0.0714, | |
| "step": 1354 | |
| }, | |
| { | |
| "epoch": 28.83, | |
| "learning_rate": 9.255519667034196e-08, | |
| "loss": 0.0833, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 28.85, | |
| "learning_rate": 8.922511845219972e-08, | |
| "loss": 0.0628, | |
| "step": 1356 | |
| }, | |
| { | |
| "epoch": 28.87, | |
| "learning_rate": 8.595578150724316e-08, | |
| "loss": 0.0679, | |
| "step": 1357 | |
| }, | |
| { | |
| "epoch": 28.89, | |
| "learning_rate": 8.274720587262641e-08, | |
| "loss": 0.0663, | |
| "step": 1358 | |
| }, | |
| { | |
| "epoch": 28.91, | |
| "learning_rate": 7.959941121310266e-08, | |
| "loss": 0.0482, | |
| "step": 1359 | |
| }, | |
| { | |
| "epoch": 28.94, | |
| "learning_rate": 7.651241682091415e-08, | |
| "loss": 0.0637, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 28.96, | |
| "learning_rate": 7.348624161566786e-08, | |
| "loss": 0.0604, | |
| "step": 1361 | |
| }, | |
| { | |
| "epoch": 28.98, | |
| "learning_rate": 7.052090414422119e-08, | |
| "loss": 0.0718, | |
| "step": 1362 | |
| }, | |
| { | |
| "epoch": 29.0, | |
| "learning_rate": 6.761642258056977e-08, | |
| "loss": 0.0898, | |
| "step": 1363 | |
| }, | |
| { | |
| "epoch": 29.02, | |
| "learning_rate": 6.477281472573537e-08, | |
| "loss": 0.0737, | |
| "step": 1364 | |
| }, | |
| { | |
| "epoch": 29.04, | |
| "learning_rate": 6.199009800765265e-08, | |
| "loss": 0.08, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 29.06, | |
| "learning_rate": 5.926828948107033e-08, | |
| "loss": 0.0654, | |
| "step": 1366 | |
| }, | |
| { | |
| "epoch": 29.09, | |
| "learning_rate": 5.660740582743907e-08, | |
| "loss": 0.0639, | |
| "step": 1367 | |
| }, | |
| { | |
| "epoch": 29.11, | |
| "learning_rate": 5.400746335481488e-08, | |
| "loss": 0.0708, | |
| "step": 1368 | |
| }, | |
| { | |
| "epoch": 29.13, | |
| "learning_rate": 5.1468477997755894e-08, | |
| "loss": 0.0571, | |
| "step": 1369 | |
| }, | |
| { | |
| "epoch": 29.15, | |
| "learning_rate": 4.899046531722351e-08, | |
| "loss": 0.0745, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 29.17, | |
| "learning_rate": 4.657344050049251e-08, | |
| "loss": 0.0573, | |
| "step": 1371 | |
| }, | |
| { | |
| "epoch": 29.19, | |
| "learning_rate": 4.42174183610522e-08, | |
| "loss": 0.0503, | |
| "step": 1372 | |
| }, | |
| { | |
| "epoch": 29.21, | |
| "learning_rate": 4.192241333851876e-08, | |
| "loss": 0.0639, | |
| "step": 1373 | |
| }, | |
| { | |
| "epoch": 29.23, | |
| "learning_rate": 3.96884394985475e-08, | |
| "loss": 0.0424, | |
| "step": 1374 | |
| }, | |
| { | |
| "epoch": 29.26, | |
| "learning_rate": 3.7515510532740716e-08, | |
| "loss": 0.0828, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 29.28, | |
| "learning_rate": 3.540363975857109e-08, | |
| "loss": 0.0695, | |
| "step": 1376 | |
| }, | |
| { | |
| "epoch": 29.3, | |
| "learning_rate": 3.335284011929951e-08, | |
| "loss": 0.064, | |
| "step": 1377 | |
| }, | |
| { | |
| "epoch": 29.32, | |
| "learning_rate": 3.136312418388854e-08, | |
| "loss": 0.0717, | |
| "step": 1378 | |
| }, | |
| { | |
| "epoch": 29.34, | |
| "learning_rate": 2.9434504146933494e-08, | |
| "loss": 0.0575, | |
| "step": 1379 | |
| }, | |
| { | |
| "epoch": 29.36, | |
| "learning_rate": 2.7566991828583688e-08, | |
| "loss": 0.0544, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 29.38, | |
| "learning_rate": 2.5760598674470226e-08, | |
| "loss": 0.0602, | |
| "step": 1381 | |
| }, | |
| { | |
| "epoch": 29.4, | |
| "learning_rate": 2.4015335755634973e-08, | |
| "loss": 0.0689, | |
| "step": 1382 | |
| }, | |
| { | |
| "epoch": 29.43, | |
| "learning_rate": 2.2331213768468363e-08, | |
| "loss": 0.0794, | |
| "step": 1383 | |
| }, | |
| { | |
| "epoch": 29.45, | |
| "learning_rate": 2.0708243034636144e-08, | |
| "loss": 0.0763, | |
| "step": 1384 | |
| }, | |
| { | |
| "epoch": 29.47, | |
| "learning_rate": 1.9146433501019413e-08, | |
| "loss": 0.0596, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 29.49, | |
| "learning_rate": 1.7645794739654665e-08, | |
| "loss": 0.0698, | |
| "step": 1386 | |
| }, | |
| { | |
| "epoch": 29.51, | |
| "learning_rate": 1.6206335947676066e-08, | |
| "loss": 0.0646, | |
| "step": 1387 | |
| }, | |
| { | |
| "epoch": 29.53, | |
| "learning_rate": 1.4828065947254388e-08, | |
| "loss": 0.0561, | |
| "step": 1388 | |
| }, | |
| { | |
| "epoch": 29.55, | |
| "learning_rate": 1.351099318554705e-08, | |
| "loss": 0.074, | |
| "step": 1389 | |
| }, | |
| { | |
| "epoch": 29.57, | |
| "learning_rate": 1.2255125734644824e-08, | |
| "loss": 0.0614, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 29.6, | |
| "learning_rate": 1.106047129152299e-08, | |
| "loss": 0.075, | |
| "step": 1391 | |
| }, | |
| { | |
| "epoch": 29.62, | |
| "learning_rate": 9.927037177993593e-09, | |
| "loss": 0.0631, | |
| "step": 1392 | |
| }, | |
| { | |
| "epoch": 29.64, | |
| "learning_rate": 8.854830340661035e-09, | |
| "loss": 0.0486, | |
| "step": 1393 | |
| }, | |
| { | |
| "epoch": 29.66, | |
| "learning_rate": 7.84385735087878e-09, | |
| "loss": 0.068, | |
| "step": 1394 | |
| }, | |
| { | |
| "epoch": 29.68, | |
| "learning_rate": 6.8941244047116e-09, | |
| "loss": 0.0757, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 29.7, | |
| "learning_rate": 6.005637322891167e-09, | |
| "loss": 0.0793, | |
| "step": 1396 | |
| }, | |
| { | |
| "epoch": 29.72, | |
| "learning_rate": 5.178401550790524e-09, | |
| "loss": 0.0492, | |
| "step": 1397 | |
| }, | |
| { | |
| "epoch": 29.74, | |
| "learning_rate": 4.41242215837856e-09, | |
| "loss": 0.0566, | |
| "step": 1398 | |
| }, | |
| { | |
| "epoch": 29.77, | |
| "learning_rate": 3.7077038402022482e-09, | |
| "loss": 0.0706, | |
| "step": 1399 | |
| }, | |
| { | |
| "epoch": 29.79, | |
| "learning_rate": 3.0642509153444575e-09, | |
| "loss": 0.0753, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 29.81, | |
| "learning_rate": 2.4820673274095207e-09, | |
| "loss": 0.0805, | |
| "step": 1401 | |
| }, | |
| { | |
| "epoch": 29.83, | |
| "learning_rate": 1.9611566444888153e-09, | |
| "loss": 0.035, | |
| "step": 1402 | |
| }, | |
| { | |
| "epoch": 29.85, | |
| "learning_rate": 1.5015220591474422e-09, | |
| "loss": 0.0781, | |
| "step": 1403 | |
| }, | |
| { | |
| "epoch": 29.87, | |
| "learning_rate": 1.103166388398691e-09, | |
| "loss": 0.0735, | |
| "step": 1404 | |
| }, | |
| { | |
| "epoch": 29.89, | |
| "learning_rate": 7.66092073689606e-10, | |
| "loss": 0.0679, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 29.91, | |
| "learning_rate": 4.903011808843339e-10, | |
| "loss": 0.0547, | |
| "step": 1406 | |
| }, | |
| { | |
| "epoch": 29.94, | |
| "learning_rate": 2.75795400255241e-10, | |
| "loss": 0.063, | |
| "step": 1407 | |
| }, | |
| { | |
| "epoch": 29.96, | |
| "learning_rate": 1.2257604646515042e-10, | |
| "loss": 0.0651, | |
| "step": 1408 | |
| }, | |
| { | |
| "epoch": 29.98, | |
| "learning_rate": 3.064405856956221e-11, | |
| "loss": 0.0612, | |
| "step": 1409 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "learning_rate": 0.0, | |
| "loss": 0.0508, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "step": 1410, | |
| "total_flos": 2.71170780991488e+16, | |
| "train_loss": 1.8968729352908777, | |
| "train_runtime": 611.7167, | |
| "train_samples_per_second": 294.646, | |
| "train_steps_per_second": 2.305 | |
| } | |
| ], | |
| "max_steps": 1410, | |
| "num_train_epochs": 30, | |
| "total_flos": 2.71170780991488e+16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
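
The file above matches the layout of a Hugging Face Trainer `trainer_state.json`: a `log_history` array of per-step records (`epoch`, `learning_rate`, `loss`, `step`), a trailing summary record (`train_loss`, `train_runtime`, throughput fields), and top-level run metadata. As a minimal sketch of how such a file can be loaded and summarized — the field names are taken from the file itself, while the path `trainer_state.json` and the specific printout are illustrative assumptions:

```python
import json

# Minimal sketch: recover the loss curve and LR schedule from a
# trainer_state.json like the one above. The path is a placeholder
# for wherever the file actually lives.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss"/"learning_rate"; the trailing summary
# record carries train_loss/train_runtime instead, so filter on "loss".
step_logs = [r for r in state["log_history"] if "loss" in r]

steps  = [r["step"] for r in step_logs]
losses = [r["loss"] for r in step_logs]
lrs    = [r["learning_rate"] for r in step_logs]

print(f"{len(step_logs)} logged steps; "
      f"max_steps={state['max_steps']}, epochs={state['num_train_epochs']}")
print(f"final loss: {losses[-1]:.4f}, final lr: {lrs[-1]:.2e}")

# Sanity-check the reported throughput: steps / runtime should match
# train_steps_per_second (1410 / 611.7167 s ≈ 2.305 here).
summary = state["log_history"][-1]
print(summary["step"] / summary["train_runtime"])
```

Note that `train_loss` (≈1.897) is the average training loss over the whole run of 1410 steps, so it is dominated by the large first-epoch losses (~30-46); the per-step losses at the end of training sit around 0.05-0.08.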