{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 68000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.9632352941176476e-05,
      "loss": 6.715,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9264705882352944e-05,
      "loss": 5.501,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.889705882352941e-05,
      "loss": 5.1983,
      "step": 1500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.8529411764705885e-05,
      "loss": 4.7139,
      "step": 2000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.816176470588236e-05,
      "loss": 4.4966,
      "step": 2500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.7794117647058826e-05,
      "loss": 4.266,
      "step": 3000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.742647058823529e-05,
      "loss": 4.0875,
      "step": 3500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.705882352941177e-05,
      "loss": 3.99,
      "step": 4000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.669117647058824e-05,
      "loss": 3.7911,
      "step": 4500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.632352941176471e-05,
      "loss": 3.7007,
      "step": 5000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.5955882352941176e-05,
      "loss": 3.5607,
      "step": 5500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.558823529411765e-05,
      "loss": 3.5668,
      "step": 6000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.522058823529412e-05,
      "loss": 3.455,
      "step": 6500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.485294117647059e-05,
      "loss": 3.4141,
      "step": 7000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.448529411764706e-05,
      "loss": 3.3723,
      "step": 7500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.411764705882353e-05,
      "loss": 3.302,
      "step": 8000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.375e-05,
      "loss": 3.3409,
      "step": 8500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.3382352941176474e-05,
      "loss": 3.2472,
      "step": 9000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.301470588235295e-05,
      "loss": 3.2675,
      "step": 9500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.2647058823529415e-05,
      "loss": 3.2344,
      "step": 10000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.227941176470588e-05,
      "loss": 3.2457,
      "step": 10500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.1911764705882356e-05,
      "loss": 3.0852,
      "step": 11000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.154411764705883e-05,
      "loss": 3.1542,
      "step": 11500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.11764705882353e-05,
      "loss": 3.0649,
      "step": 12000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.0808823529411765e-05,
      "loss": 3.088,
      "step": 12500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.044117647058824e-05,
      "loss": 3.1423,
      "step": 13000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.007352941176471e-05,
      "loss": 3.1275,
      "step": 13500
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.970588235294117e-05,
      "loss": 3.1152,
      "step": 14000
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.933823529411765e-05,
      "loss": 3.0634,
      "step": 14500
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.897058823529412e-05,
      "loss": 3.0047,
      "step": 15000
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.8602941176470595e-05,
      "loss": 3.1367,
      "step": 15500
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.8235294117647055e-05,
      "loss": 3.0443,
      "step": 16000
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.786764705882353e-05,
      "loss": 3.031,
      "step": 16500
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 3.0197,
      "step": 17000
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.713235294117647e-05,
      "loss": 2.9507,
      "step": 17500
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.6764705882352945e-05,
      "loss": 3.0342,
      "step": 18000
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.639705882352941e-05,
      "loss": 2.9738,
      "step": 18500
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.6029411764705886e-05,
      "loss": 2.9074,
      "step": 19000
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.566176470588235e-05,
      "loss": 3.0614,
      "step": 19500
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.529411764705883e-05,
      "loss": 2.954,
      "step": 20000
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.4926470588235294e-05,
      "loss": 2.9487,
      "step": 20500
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.455882352941177e-05,
      "loss": 2.9129,
      "step": 21000
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.4191176470588236e-05,
      "loss": 2.9668,
      "step": 21500
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.382352941176471e-05,
      "loss": 2.9865,
      "step": 22000
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.345588235294118e-05,
      "loss": 2.8746,
      "step": 22500
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.308823529411765e-05,
      "loss": 2.9399,
      "step": 23000
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.272058823529412e-05,
      "loss": 2.9166,
      "step": 23500
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.235294117647059e-05,
      "loss": 2.8706,
      "step": 24000
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.198529411764706e-05,
      "loss": 2.9061,
      "step": 24500
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.161764705882353e-05,
      "loss": 2.8416,
      "step": 25000
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.125e-05,
      "loss": 2.9265,
      "step": 25500
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.0882352941176475e-05,
      "loss": 2.8682,
      "step": 26000
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.0514705882352945e-05,
      "loss": 2.9347,
      "step": 26500
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.0147058823529413e-05,
      "loss": 2.828,
      "step": 27000
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.9779411764705883e-05,
      "loss": 2.8787,
      "step": 27500
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.9411764705882354e-05,
      "loss": 2.8648,
      "step": 28000
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.9044117647058828e-05,
      "loss": 2.8403,
      "step": 28500
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.8676470588235295e-05,
      "loss": 2.8271,
      "step": 29000
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.8308823529411766e-05,
      "loss": 2.878,
      "step": 29500
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.7941176470588236e-05,
      "loss": 2.8391,
      "step": 30000
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.757352941176471e-05,
      "loss": 2.8274,
      "step": 30500
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.7205882352941174e-05,
      "loss": 2.7774,
      "step": 31000
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.6838235294117648e-05,
      "loss": 2.8637,
      "step": 31500
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.647058823529412e-05,
      "loss": 2.8113,
      "step": 32000
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.6102941176470593e-05,
      "loss": 2.8401,
      "step": 32500
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.5735294117647057e-05,
      "loss": 2.8176,
      "step": 33000
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.536764705882353e-05,
      "loss": 2.837,
      "step": 33500
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5e-05,
      "loss": 2.8031,
      "step": 34000
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4632352941176472e-05,
      "loss": 2.8055,
      "step": 34500
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.4264705882352942e-05,
      "loss": 2.8223,
      "step": 35000
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.3897058823529413e-05,
      "loss": 2.8357,
      "step": 35500
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.3529411764705884e-05,
      "loss": 2.8074,
      "step": 36000
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.3161764705882354e-05,
      "loss": 2.7696,
      "step": 36500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.2794117647058825e-05,
      "loss": 2.7162,
      "step": 37000
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.2426470588235296e-05,
      "loss": 2.792,
      "step": 37500
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.2058823529411766e-05,
      "loss": 2.7908,
      "step": 38000
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.1691176470588237e-05,
      "loss": 2.8242,
      "step": 38500
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.1323529411764707e-05,
      "loss": 2.7592,
      "step": 39000
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.0955882352941178e-05,
      "loss": 2.7395,
      "step": 39500
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.058823529411765e-05,
      "loss": 2.7354,
      "step": 40000
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.022058823529412e-05,
      "loss": 2.7621,
      "step": 40500
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.9852941176470586e-05,
      "loss": 2.7514,
      "step": 41000
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.948529411764706e-05,
      "loss": 2.7455,
      "step": 41500
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9117647058823528e-05,
      "loss": 2.7615,
      "step": 42000
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 2.7548,
      "step": 42500
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8382352941176472e-05,
      "loss": 2.6271,
      "step": 43000
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8014705882352943e-05,
      "loss": 2.7375,
      "step": 43500
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7647058823529414e-05,
      "loss": 2.7691,
      "step": 44000
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7279411764705884e-05,
      "loss": 2.6821,
      "step": 44500
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.6911764705882355e-05,
      "loss": 2.8106,
      "step": 45000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.6544117647058825e-05,
      "loss": 2.7453,
      "step": 45500
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6176470588235296e-05,
      "loss": 2.8096,
      "step": 46000
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.5808823529411763e-05,
      "loss": 2.7674,
      "step": 46500
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5441176470588237e-05,
      "loss": 2.7471,
      "step": 47000
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5073529411764706e-05,
      "loss": 2.7471,
      "step": 47500
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.4705882352941177e-05,
      "loss": 2.6936,
      "step": 48000
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.4338235294117647e-05,
      "loss": 2.6782,
      "step": 48500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.3970588235294118e-05,
      "loss": 2.7112,
      "step": 49000
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.3602941176470587e-05,
      "loss": 2.7102,
      "step": 49500
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.323529411764706e-05,
      "loss": 2.7814,
      "step": 50000
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.2867647058823528e-05,
      "loss": 2.7376,
      "step": 50500
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.25e-05,
      "loss": 2.7446,
      "step": 51000
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2132352941176471e-05,
      "loss": 2.6935,
      "step": 51500
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 2.7415,
      "step": 52000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.1397058823529412e-05,
      "loss": 2.6912,
      "step": 52500
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1029411764705883e-05,
      "loss": 2.7554,
      "step": 53000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0661764705882354e-05,
      "loss": 2.7384,
      "step": 53500
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0294117647058824e-05,
      "loss": 2.6772,
      "step": 54000
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.926470588235293e-06,
      "loss": 2.72,
      "step": 54500
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.558823529411764e-06,
      "loss": 2.6073,
      "step": 55000
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.191176470588236e-06,
      "loss": 2.719,
      "step": 55500
    },
    {
      "epoch": 0.82,
      "learning_rate": 8.823529411764707e-06,
      "loss": 2.6941,
      "step": 56000
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.455882352941177e-06,
      "loss": 2.7565,
      "step": 56500
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.088235294117648e-06,
      "loss": 2.6464,
      "step": 57000
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.720588235294119e-06,
      "loss": 2.5853,
      "step": 57500
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.3529411764705884e-06,
      "loss": 2.6563,
      "step": 58000
    },
    {
      "epoch": 0.86,
      "learning_rate": 6.985294117647059e-06,
      "loss": 2.686,
      "step": 58500
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.61764705882353e-06,
      "loss": 2.7286,
      "step": 59000
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.25e-06,
      "loss": 2.683,
      "step": 59500
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.882352941176471e-06,
      "loss": 2.7328,
      "step": 60000
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.5147058823529415e-06,
      "loss": 2.6955,
      "step": 60500
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.147058823529412e-06,
      "loss": 2.6632,
      "step": 61000
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.779411764705882e-06,
      "loss": 2.7731,
      "step": 61500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.411764705882353e-06,
      "loss": 2.7078,
      "step": 62000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.044117647058824e-06,
      "loss": 2.6845,
      "step": 62500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.6764705882352942e-06,
      "loss": 2.621,
      "step": 63000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.308823529411765e-06,
      "loss": 2.7139,
      "step": 63500
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 2.6447,
      "step": 64000
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.573529411764706e-06,
      "loss": 2.5854,
      "step": 64500
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.2058823529411767e-06,
      "loss": 2.6676,
      "step": 65000
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.8382352941176471e-06,
      "loss": 2.6336,
      "step": 65500
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 2.7154,
      "step": 66000
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.1029411764705884e-06,
      "loss": 2.6674,
      "step": 66500
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.352941176470589e-07,
      "loss": 2.6998,
      "step": 67000
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.6764705882352943e-07,
      "loss": 2.7515,
      "step": 67500
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0,
      "loss": 2.7307,
      "step": 68000
    }
  ],
  "logging_steps": 500,
  "max_steps": 68000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 2000,
  "total_flos": 1.137142923264e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}