|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.6451612903225806,
  "global_step": 80000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.97983870967742e-05,
      "loss": 6.7059,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.959677419354839e-05,
      "loss": 5.6289,
      "step": 1000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.939516129032259e-05,
      "loss": 5.1456,
      "step": 1500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9193548387096775e-05,
      "loss": 4.7873,
      "step": 2000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.899193548387097e-05,
      "loss": 4.6802,
      "step": 2500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.8790322580645164e-05,
      "loss": 4.3627,
      "step": 3000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.858870967741936e-05,
      "loss": 4.196,
      "step": 3500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 3.7748,
      "step": 4000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.818548387096775e-05,
      "loss": 3.8727,
      "step": 4500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.7983870967741937e-05,
      "loss": 3.646,
      "step": 5000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.778225806451613e-05,
      "loss": 3.6242,
      "step": 5500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.7580645161290326e-05,
      "loss": 3.5577,
      "step": 6000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.737903225806452e-05,
      "loss": 3.5461,
      "step": 6500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.7177419354838716e-05,
      "loss": 3.4455,
      "step": 7000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.697580645161291e-05,
      "loss": 3.4798,
      "step": 7500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.67741935483871e-05,
      "loss": 3.4644,
      "step": 8000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.657258064516129e-05,
      "loss": 3.3689,
      "step": 8500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.637096774193548e-05,
      "loss": 3.3166,
      "step": 9000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.6169354838709676e-05,
      "loss": 3.3226,
      "step": 9500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.596774193548387e-05,
      "loss": 3.283,
      "step": 10000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.5766129032258065e-05,
      "loss": 3.1365,
      "step": 10500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.556451612903226e-05,
      "loss": 3.23,
      "step": 11000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.5362903225806455e-05,
      "loss": 3.2074,
      "step": 11500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.516129032258064e-05,
      "loss": 3.221,
      "step": 12000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.495967741935484e-05,
      "loss": 3.1792,
      "step": 12500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.475806451612903e-05,
      "loss": 3.0746,
      "step": 13000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.455645161290323e-05,
      "loss": 3.1394,
      "step": 13500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.435483870967742e-05,
      "loss": 3.0609,
      "step": 14000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.415322580645162e-05,
      "loss": 3.1135,
      "step": 14500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.395161290322581e-05,
      "loss": 3.0392,
      "step": 15000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.375e-05,
      "loss": 3.0255,
      "step": 15500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.3548387096774194e-05,
      "loss": 3.0095,
      "step": 16000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.334677419354839e-05,
      "loss": 3.0491,
      "step": 16500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.3145161290322584e-05,
      "loss": 3.068,
      "step": 17000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.294354838709678e-05,
      "loss": 2.9763,
      "step": 17500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.2741935483870973e-05,
      "loss": 2.985,
      "step": 18000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.254032258064516e-05,
      "loss": 2.9917,
      "step": 18500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.2338709677419356e-05,
      "loss": 3.0161,
      "step": 19000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.213709677419355e-05,
      "loss": 2.9634,
      "step": 19500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.1935483870967746e-05,
      "loss": 2.9889,
      "step": 20000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.173387096774194e-05,
      "loss": 3.0213,
      "step": 20500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.1532258064516135e-05,
      "loss": 2.9616,
      "step": 21000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.133064516129033e-05,
      "loss": 2.8978,
      "step": 21500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.112903225806452e-05,
      "loss": 2.7943,
      "step": 22000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.092741935483871e-05,
      "loss": 2.9148,
      "step": 22500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.072580645161291e-05,
      "loss": 2.9236,
      "step": 23000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.0524193548387096e-05,
      "loss": 2.8025,
      "step": 23500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.032258064516129e-05,
      "loss": 2.8641,
      "step": 24000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.0120967741935485e-05,
      "loss": 2.8458,
      "step": 24500
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.991935483870968e-05,
      "loss": 2.8769,
      "step": 25000
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.971774193548387e-05,
      "loss": 2.9281,
      "step": 25500
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.951612903225806e-05,
      "loss": 2.8934,
      "step": 26000
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.931451612903226e-05,
      "loss": 2.8421,
      "step": 26500
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.911290322580645e-05,
      "loss": 2.9583,
      "step": 27000
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.891129032258065e-05,
      "loss": 2.8541,
      "step": 27500
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.870967741935484e-05,
      "loss": 2.9187,
      "step": 28000
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.8508064516129036e-05,
      "loss": 2.9765,
      "step": 28500
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.8306451612903224e-05,
      "loss": 2.6983,
      "step": 29000
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.810483870967742e-05,
      "loss": 2.8056,
      "step": 29500
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.7903225806451614e-05,
      "loss": 2.7826,
      "step": 30000
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.770161290322581e-05,
      "loss": 2.8993,
      "step": 30500
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 2.931,
      "step": 31000
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.72983870967742e-05,
      "loss": 2.8276,
      "step": 31500
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.7096774193548386e-05,
      "loss": 2.8348,
      "step": 32000
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.689516129032258e-05,
      "loss": 2.8317,
      "step": 32500
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.6693548387096776e-05,
      "loss": 2.8061,
      "step": 33000
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.649193548387097e-05,
      "loss": 2.7982,
      "step": 33500
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.6290322580645165e-05,
      "loss": 2.8439,
      "step": 34000
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.608870967741936e-05,
      "loss": 2.7924,
      "step": 34500
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.5887096774193555e-05,
      "loss": 2.7077,
      "step": 35000
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.568548387096774e-05,
      "loss": 2.8267,
      "step": 35500
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.548387096774194e-05,
      "loss": 2.8308,
      "step": 36000
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.528225806451613e-05,
      "loss": 2.7554,
      "step": 36500
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.508064516129033e-05,
      "loss": 2.7693,
      "step": 37000
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.487903225806452e-05,
      "loss": 2.7417,
      "step": 37500
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.467741935483872e-05,
      "loss": 2.6836,
      "step": 38000
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.4475806451612905e-05,
      "loss": 2.7334,
      "step": 38500
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.427419354838709e-05,
      "loss": 2.9839,
      "step": 39000
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.407258064516129e-05,
      "loss": 2.7495,
      "step": 39500
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.387096774193548e-05,
      "loss": 2.767,
      "step": 40000
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.366935483870968e-05,
      "loss": 2.7316,
      "step": 40500
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.346774193548387e-05,
      "loss": 2.7787,
      "step": 41000
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.3266129032258067e-05,
      "loss": 2.7504,
      "step": 41500
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.306451612903226e-05,
      "loss": 2.6913,
      "step": 42000
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.286290322580645e-05,
      "loss": 2.7743,
      "step": 42500
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.2661290322580644e-05,
      "loss": 2.7656,
      "step": 43000
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.245967741935484e-05,
      "loss": 2.7828,
      "step": 43500
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 2.7927,
      "step": 44000
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.205645161290323e-05,
      "loss": 2.7036,
      "step": 44500
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.185483870967742e-05,
      "loss": 2.6734,
      "step": 45000
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.165322580645161e-05,
      "loss": 2.7777,
      "step": 45500
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.1451612903225806e-05,
      "loss": 2.7163,
      "step": 46000
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.125e-05,
      "loss": 2.7279,
      "step": 46500
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.1048387096774195e-05,
      "loss": 2.7209,
      "step": 47000
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.084677419354839e-05,
      "loss": 2.6712,
      "step": 47500
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.0645161290322585e-05,
      "loss": 2.7012,
      "step": 48000
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.0443548387096776e-05,
      "loss": 2.7272,
      "step": 48500
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.024193548387097e-05,
      "loss": 2.7072,
      "step": 49000
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.0040322580645162e-05,
      "loss": 2.6696,
      "step": 49500
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.9838709677419357e-05,
      "loss": 2.6467,
      "step": 50000
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9637096774193552e-05,
      "loss": 2.7601,
      "step": 50500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9435483870967743e-05,
      "loss": 2.7173,
      "step": 51000
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.9233870967741938e-05,
      "loss": 2.6568,
      "step": 51500
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.9032258064516133e-05,
      "loss": 2.713,
      "step": 52000
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.8830645161290328e-05,
      "loss": 2.7691,
      "step": 52500
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.862903225806452e-05,
      "loss": 2.7268,
      "step": 53000
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.8427419354838714e-05,
      "loss": 2.5954,
      "step": 53500
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.822580645161291e-05,
      "loss": 2.6935,
      "step": 54000
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.8024193548387097e-05,
      "loss": 2.6538,
      "step": 54500
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.7822580645161288e-05,
      "loss": 2.7291,
      "step": 55000
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7620967741935483e-05,
      "loss": 2.6463,
      "step": 55500
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7419354838709678e-05,
      "loss": 2.7153,
      "step": 56000
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.721774193548387e-05,
      "loss": 2.6529,
      "step": 56500
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.7016129032258064e-05,
      "loss": 2.6653,
      "step": 57000
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.681451612903226e-05,
      "loss": 2.6571,
      "step": 57500
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.661290322580645e-05,
      "loss": 2.6362,
      "step": 58000
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.6411290322580645e-05,
      "loss": 2.709,
      "step": 58500
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.620967741935484e-05,
      "loss": 2.6777,
      "step": 59000
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.6008064516129034e-05,
      "loss": 2.7051,
      "step": 59500
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.5806451612903226e-05,
      "loss": 2.6077,
      "step": 60000
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.560483870967742e-05,
      "loss": 2.6631,
      "step": 60500
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.5403225806451615e-05,
      "loss": 2.6302,
      "step": 61000
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5201612903225806e-05,
      "loss": 2.605,
      "step": 61500
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5e-05,
      "loss": 2.5936,
      "step": 62000
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.4798387096774196e-05,
      "loss": 2.6623,
      "step": 62500
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4596774193548387e-05,
      "loss": 2.7046,
      "step": 63000
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4395161290322582e-05,
      "loss": 2.5976,
      "step": 63500
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.4193548387096777e-05,
      "loss": 2.6664,
      "step": 64000
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.3991935483870968e-05,
      "loss": 2.6509,
      "step": 64500
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.3790322580645163e-05,
      "loss": 2.6651,
      "step": 65000
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3588709677419358e-05,
      "loss": 2.6349,
      "step": 65500
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.338709677419355e-05,
      "loss": 2.6451,
      "step": 66000
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.318548387096774e-05,
      "loss": 2.6604,
      "step": 66500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.2983870967741935e-05,
      "loss": 2.6519,
      "step": 67000
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.278225806451613e-05,
      "loss": 2.6072,
      "step": 67500
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.258064516129032e-05,
      "loss": 2.6437,
      "step": 68000
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.2379032258064516e-05,
      "loss": 2.6558,
      "step": 68500
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.217741935483871e-05,
      "loss": 2.5988,
      "step": 69000
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.1975806451612906e-05,
      "loss": 2.646,
      "step": 69500
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.1774193548387097e-05,
      "loss": 2.6395,
      "step": 70000
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.1572580645161292e-05,
      "loss": 2.6216,
      "step": 70500
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.1370967741935487e-05,
      "loss": 2.6985,
      "step": 71000
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1169354838709678e-05,
      "loss": 2.6508,
      "step": 71500
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.0967741935483873e-05,
      "loss": 2.6124,
      "step": 72000
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.0766129032258068e-05,
      "loss": 2.5642,
      "step": 72500
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.056451612903226e-05,
      "loss": 2.6297,
      "step": 73000
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.0362903225806454e-05,
      "loss": 2.5099,
      "step": 73500
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.0161290322580645e-05,
      "loss": 2.5825,
      "step": 74000
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.995967741935484e-05,
      "loss": 2.6509,
      "step": 74500
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.975806451612903e-05,
      "loss": 2.5801,
      "step": 75000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.9556451612903226e-05,
      "loss": 2.6328,
      "step": 75500
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.935483870967742e-05,
      "loss": 2.5998,
      "step": 76000
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9153225806451612e-05,
      "loss": 2.6794,
      "step": 76500
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8951612903225807e-05,
      "loss": 2.6152,
      "step": 77000
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 2.5854,
      "step": 77500
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8548387096774193e-05,
      "loss": 2.587,
      "step": 78000
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8346774193548388e-05,
      "loss": 2.5791,
      "step": 78500
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8145161290322583e-05,
      "loss": 2.5613,
      "step": 79000
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7943548387096777e-05,
      "loss": 2.581,
      "step": 79500
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.774193548387097e-05,
      "loss": 2.5466,
      "step": 80000
    }
  ],
  "max_steps": 124000,
  "num_train_epochs": 9223372036854775807,
  "total_flos": 1.18874494205952e+18,
  "trial_name": null,
  "trial_params": null
}
|
|