|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 102, |
|
"global_step": 102, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00980392156862745, |
|
"grad_norm": 2.500279188156128, |
|
"learning_rate": 1e-05, |
|
"loss": 2.3876, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0196078431372549, |
|
"grad_norm": 2.360731840133667, |
|
"learning_rate": 9.901960784313727e-06, |
|
"loss": 2.3224, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.029411764705882353, |
|
"grad_norm": 2.111973285675049, |
|
"learning_rate": 9.803921568627451e-06, |
|
"loss": 2.3377, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0392156862745098, |
|
"grad_norm": 1.7793817520141602, |
|
"learning_rate": 9.705882352941177e-06, |
|
"loss": 2.2592, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.049019607843137254, |
|
"grad_norm": 1.3986390829086304, |
|
"learning_rate": 9.607843137254903e-06, |
|
"loss": 2.2263, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.058823529411764705, |
|
"grad_norm": 1.1015570163726807, |
|
"learning_rate": 9.509803921568628e-06, |
|
"loss": 2.2227, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.06862745098039216, |
|
"grad_norm": 0.9321516752243042, |
|
"learning_rate": 9.411764705882354e-06, |
|
"loss": 2.1582, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0784313725490196, |
|
"grad_norm": 0.8922587633132935, |
|
"learning_rate": 9.31372549019608e-06, |
|
"loss": 2.1078, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.08823529411764706, |
|
"grad_norm": 0.9146968126296997, |
|
"learning_rate": 9.215686274509804e-06, |
|
"loss": 2.0554, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.09803921568627451, |
|
"grad_norm": 0.9473930597305298, |
|
"learning_rate": 9.11764705882353e-06, |
|
"loss": 2.0022, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.10784313725490197, |
|
"grad_norm": 0.9462062120437622, |
|
"learning_rate": 9.019607843137256e-06, |
|
"loss": 2.0462, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.11764705882352941, |
|
"grad_norm": 0.959138810634613, |
|
"learning_rate": 8.921568627450982e-06, |
|
"loss": 2.0612, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.12745098039215685, |
|
"grad_norm": 0.9512083530426025, |
|
"learning_rate": 8.823529411764707e-06, |
|
"loss": 1.9063, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.13725490196078433, |
|
"grad_norm": 0.9136151671409607, |
|
"learning_rate": 8.725490196078433e-06, |
|
"loss": 2.0061, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.14705882352941177, |
|
"grad_norm": 0.8621863722801208, |
|
"learning_rate": 8.627450980392157e-06, |
|
"loss": 1.9088, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.1568627450980392, |
|
"grad_norm": 0.8003532290458679, |
|
"learning_rate": 8.529411764705883e-06, |
|
"loss": 1.8919, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.16666666666666666, |
|
"grad_norm": 0.7299250364303589, |
|
"learning_rate": 8.43137254901961e-06, |
|
"loss": 1.8571, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.17647058823529413, |
|
"grad_norm": 0.6769949197769165, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 1.8633, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.18627450980392157, |
|
"grad_norm": 0.6527869701385498, |
|
"learning_rate": 8.23529411764706e-06, |
|
"loss": 1.7669, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.19607843137254902, |
|
"grad_norm": 0.6449242234230042, |
|
"learning_rate": 8.137254901960784e-06, |
|
"loss": 1.7996, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.20588235294117646, |
|
"grad_norm": 0.6480177044868469, |
|
"learning_rate": 8.03921568627451e-06, |
|
"loss": 1.7738, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.21568627450980393, |
|
"grad_norm": 0.6246963739395142, |
|
"learning_rate": 7.941176470588236e-06, |
|
"loss": 1.7386, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.22549019607843138, |
|
"grad_norm": 0.6275233626365662, |
|
"learning_rate": 7.84313725490196e-06, |
|
"loss": 1.8029, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.23529411764705882, |
|
"grad_norm": 0.5825409889221191, |
|
"learning_rate": 7.745098039215687e-06, |
|
"loss": 1.6673, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.24509803921568626, |
|
"grad_norm": 0.5436587929725647, |
|
"learning_rate": 7.647058823529411e-06, |
|
"loss": 1.6992, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.2549019607843137, |
|
"grad_norm": 0.5287730097770691, |
|
"learning_rate": 7.549019607843138e-06, |
|
"loss": 1.6691, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.2647058823529412, |
|
"grad_norm": 0.5117806792259216, |
|
"learning_rate": 7.450980392156863e-06, |
|
"loss": 1.6736, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.27450980392156865, |
|
"grad_norm": 0.5041622519493103, |
|
"learning_rate": 7.352941176470589e-06, |
|
"loss": 1.6874, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.28431372549019607, |
|
"grad_norm": 0.47983038425445557, |
|
"learning_rate": 7.2549019607843145e-06, |
|
"loss": 1.6406, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.29411764705882354, |
|
"grad_norm": 0.4921911358833313, |
|
"learning_rate": 7.15686274509804e-06, |
|
"loss": 1.6127, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.30392156862745096, |
|
"grad_norm": 0.48269519209861755, |
|
"learning_rate": 7.058823529411766e-06, |
|
"loss": 1.6611, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.3137254901960784, |
|
"grad_norm": 0.5123285055160522, |
|
"learning_rate": 6.96078431372549e-06, |
|
"loss": 1.6019, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.3235294117647059, |
|
"grad_norm": 0.4910086691379547, |
|
"learning_rate": 6.862745098039216e-06, |
|
"loss": 1.5817, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 0.49109798669815063, |
|
"learning_rate": 6.764705882352942e-06, |
|
"loss": 1.5944, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.3431372549019608, |
|
"grad_norm": 0.4968704581260681, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 1.5942, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.35294117647058826, |
|
"grad_norm": 0.47293922305107117, |
|
"learning_rate": 6.568627450980393e-06, |
|
"loss": 1.564, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.3627450980392157, |
|
"grad_norm": 0.44330501556396484, |
|
"learning_rate": 6.470588235294119e-06, |
|
"loss": 1.5762, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.37254901960784315, |
|
"grad_norm": 0.4455674886703491, |
|
"learning_rate": 6.372549019607843e-06, |
|
"loss": 1.5843, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.38235294117647056, |
|
"grad_norm": 0.41862353682518005, |
|
"learning_rate": 6.274509803921569e-06, |
|
"loss": 1.5127, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.39215686274509803, |
|
"grad_norm": 0.4261449873447418, |
|
"learning_rate": 6.176470588235295e-06, |
|
"loss": 1.5167, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.4019607843137255, |
|
"grad_norm": 0.4266234040260315, |
|
"learning_rate": 6.07843137254902e-06, |
|
"loss": 1.5048, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.4117647058823529, |
|
"grad_norm": 0.432207852602005, |
|
"learning_rate": 5.980392156862746e-06, |
|
"loss": 1.4852, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.4215686274509804, |
|
"grad_norm": 0.40910154581069946, |
|
"learning_rate": 5.882352941176471e-06, |
|
"loss": 1.4988, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.43137254901960786, |
|
"grad_norm": 0.4706984758377075, |
|
"learning_rate": 5.784313725490197e-06, |
|
"loss": 1.5645, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.4411764705882353, |
|
"grad_norm": 0.40101850032806396, |
|
"learning_rate": 5.686274509803922e-06, |
|
"loss": 1.4994, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.45098039215686275, |
|
"grad_norm": 0.40438491106033325, |
|
"learning_rate": 5.588235294117647e-06, |
|
"loss": 1.5272, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.46078431372549017, |
|
"grad_norm": 0.39020276069641113, |
|
"learning_rate": 5.4901960784313735e-06, |
|
"loss": 1.4562, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.47058823529411764, |
|
"grad_norm": 0.37909573316574097, |
|
"learning_rate": 5.392156862745098e-06, |
|
"loss": 1.4873, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.4803921568627451, |
|
"grad_norm": 0.3828938603401184, |
|
"learning_rate": 5.294117647058824e-06, |
|
"loss": 1.4798, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.49019607843137253, |
|
"grad_norm": 0.367044597864151, |
|
"learning_rate": 5.19607843137255e-06, |
|
"loss": 1.4328, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.36673134565353394, |
|
"learning_rate": 5.098039215686274e-06, |
|
"loss": 1.4307, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.5098039215686274, |
|
"grad_norm": 0.3599296808242798, |
|
"learning_rate": 5e-06, |
|
"loss": 1.4069, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.5196078431372549, |
|
"grad_norm": 0.3603789806365967, |
|
"learning_rate": 4.901960784313726e-06, |
|
"loss": 1.4296, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.5294117647058824, |
|
"grad_norm": 0.3620091676712036, |
|
"learning_rate": 4.803921568627452e-06, |
|
"loss": 1.4063, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.5392156862745098, |
|
"grad_norm": 0.33629655838012695, |
|
"learning_rate": 4.705882352941177e-06, |
|
"loss": 1.421, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.5490196078431373, |
|
"grad_norm": 0.3413692116737366, |
|
"learning_rate": 4.607843137254902e-06, |
|
"loss": 1.4518, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.5588235294117647, |
|
"grad_norm": 0.3161819279193878, |
|
"learning_rate": 4.509803921568628e-06, |
|
"loss": 1.3914, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.5686274509803921, |
|
"grad_norm": 0.3277413547039032, |
|
"learning_rate": 4.411764705882353e-06, |
|
"loss": 1.389, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.5784313725490197, |
|
"grad_norm": 0.3128764033317566, |
|
"learning_rate": 4.313725490196079e-06, |
|
"loss": 1.395, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.5882352941176471, |
|
"grad_norm": 0.31412273645401, |
|
"learning_rate": 4.215686274509805e-06, |
|
"loss": 1.3735, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5980392156862745, |
|
"grad_norm": 0.3194039762020111, |
|
"learning_rate": 4.11764705882353e-06, |
|
"loss": 1.4189, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.6078431372549019, |
|
"grad_norm": 0.3120681941509247, |
|
"learning_rate": 4.019607843137255e-06, |
|
"loss": 1.3911, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.6176470588235294, |
|
"grad_norm": 0.3160897493362427, |
|
"learning_rate": 3.92156862745098e-06, |
|
"loss": 1.3543, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.6274509803921569, |
|
"grad_norm": 0.31680962443351746, |
|
"learning_rate": 3.8235294117647055e-06, |
|
"loss": 1.3603, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.6372549019607843, |
|
"grad_norm": 0.31749823689460754, |
|
"learning_rate": 3.7254901960784316e-06, |
|
"loss": 1.4025, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.6470588235294118, |
|
"grad_norm": 0.3060447573661804, |
|
"learning_rate": 3.6274509803921573e-06, |
|
"loss": 1.3412, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.6568627450980392, |
|
"grad_norm": 0.3055213391780853, |
|
"learning_rate": 3.529411764705883e-06, |
|
"loss": 1.3913, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.3184523582458496, |
|
"learning_rate": 3.431372549019608e-06, |
|
"loss": 1.3994, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.6764705882352942, |
|
"grad_norm": 0.30899760127067566, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 1.3694, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.6862745098039216, |
|
"grad_norm": 0.29854196310043335, |
|
"learning_rate": 3.2352941176470594e-06, |
|
"loss": 1.3715, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.696078431372549, |
|
"grad_norm": 0.298645555973053, |
|
"learning_rate": 3.1372549019607846e-06, |
|
"loss": 1.3476, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.7058823529411765, |
|
"grad_norm": 0.2904506027698517, |
|
"learning_rate": 3.03921568627451e-06, |
|
"loss": 1.3476, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.7156862745098039, |
|
"grad_norm": 0.2939620912075043, |
|
"learning_rate": 2.9411764705882355e-06, |
|
"loss": 1.3666, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.7254901960784313, |
|
"grad_norm": 0.29882726073265076, |
|
"learning_rate": 2.843137254901961e-06, |
|
"loss": 1.347, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.7352941176470589, |
|
"grad_norm": 0.31818199157714844, |
|
"learning_rate": 2.7450980392156867e-06, |
|
"loss": 1.3821, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.7450980392156863, |
|
"grad_norm": 0.29951557517051697, |
|
"learning_rate": 2.647058823529412e-06, |
|
"loss": 1.3443, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.7549019607843137, |
|
"grad_norm": 0.2927483320236206, |
|
"learning_rate": 2.549019607843137e-06, |
|
"loss": 1.3258, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.7647058823529411, |
|
"grad_norm": 0.2869877219200134, |
|
"learning_rate": 2.450980392156863e-06, |
|
"loss": 1.3304, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.7745098039215687, |
|
"grad_norm": 0.29732242226600647, |
|
"learning_rate": 2.3529411764705885e-06, |
|
"loss": 1.3465, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.7843137254901961, |
|
"grad_norm": 0.30281081795692444, |
|
"learning_rate": 2.254901960784314e-06, |
|
"loss": 1.319, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.7941176470588235, |
|
"grad_norm": 0.2886880040168762, |
|
"learning_rate": 2.1568627450980393e-06, |
|
"loss": 1.3312, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.803921568627451, |
|
"grad_norm": 0.29117289185523987, |
|
"learning_rate": 2.058823529411765e-06, |
|
"loss": 1.3463, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.8137254901960784, |
|
"grad_norm": 0.28929680585861206, |
|
"learning_rate": 1.96078431372549e-06, |
|
"loss": 1.3469, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.8235294117647058, |
|
"grad_norm": 0.28210750222206116, |
|
"learning_rate": 1.8627450980392158e-06, |
|
"loss": 1.3329, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 0.282003790140152, |
|
"learning_rate": 1.7647058823529414e-06, |
|
"loss": 1.2958, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.8431372549019608, |
|
"grad_norm": 0.3041072487831116, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 1.3422, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.8529411764705882, |
|
"grad_norm": 0.28907510638237, |
|
"learning_rate": 1.5686274509803923e-06, |
|
"loss": 1.355, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.8627450980392157, |
|
"grad_norm": 0.28159913420677185, |
|
"learning_rate": 1.4705882352941177e-06, |
|
"loss": 1.3366, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.8725490196078431, |
|
"grad_norm": 0.28043174743652344, |
|
"learning_rate": 1.3725490196078434e-06, |
|
"loss": 1.3384, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.8823529411764706, |
|
"grad_norm": 0.29932349920272827, |
|
"learning_rate": 1.2745098039215686e-06, |
|
"loss": 1.2866, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.8921568627450981, |
|
"grad_norm": 0.2869769036769867, |
|
"learning_rate": 1.1764705882352942e-06, |
|
"loss": 1.3329, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.9019607843137255, |
|
"grad_norm": 0.27412521839141846, |
|
"learning_rate": 1.0784313725490197e-06, |
|
"loss": 1.3273, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.9117647058823529, |
|
"grad_norm": 0.291384220123291, |
|
"learning_rate": 9.80392156862745e-07, |
|
"loss": 1.3566, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.9215686274509803, |
|
"grad_norm": 0.28981104493141174, |
|
"learning_rate": 8.823529411764707e-07, |
|
"loss": 1.3279, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.9313725490196079, |
|
"grad_norm": 0.28091126680374146, |
|
"learning_rate": 7.843137254901962e-07, |
|
"loss": 1.2947, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.9411764705882353, |
|
"grad_norm": 0.28839319944381714, |
|
"learning_rate": 6.862745098039217e-07, |
|
"loss": 1.3098, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.9509803921568627, |
|
"grad_norm": 0.27996671199798584, |
|
"learning_rate": 5.882352941176471e-07, |
|
"loss": 1.3001, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.9607843137254902, |
|
"grad_norm": 0.27735868096351624, |
|
"learning_rate": 4.901960784313725e-07, |
|
"loss": 1.3123, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.9705882352941176, |
|
"grad_norm": 0.2749045193195343, |
|
"learning_rate": 3.921568627450981e-07, |
|
"loss": 1.3244, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.9803921568627451, |
|
"grad_norm": 0.28808385133743286, |
|
"learning_rate": 2.9411764705882356e-07, |
|
"loss": 1.2847, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.9901960784313726, |
|
"grad_norm": 0.28120607137680054, |
|
"learning_rate": 1.9607843137254904e-07, |
|
"loss": 1.2952, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2849506437778473, |
|
"learning_rate": 9.803921568627452e-08, |
|
"loss": 1.3353, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.329064965248108, |
|
"eval_runtime": 2.3109, |
|
"eval_samples_per_second": 6.491, |
|
"eval_steps_per_second": 0.865, |
|
"step": 102 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 102, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 0, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.1010882465169408e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |