{
  "best_metric": 66.9763,
  "best_model_checkpoint": "Meta-Llama-3.1-8B-Instruct-finetuned/checkpoint-400",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0025,
      "grad_norm": 0.5086380243301392,
      "learning_rate": 1.9950000000000004e-05,
      "loss": 2.3289,
      "step": 1
    },
    {
      "epoch": 0.005,
      "grad_norm": 0.8586075305938721,
      "learning_rate": 1.9900000000000003e-05,
      "loss": 3.1847,
      "step": 2
    },
    {
      "epoch": 0.0075,
      "grad_norm": 0.7075896859169006,
      "learning_rate": 1.985e-05,
      "loss": 2.7698,
      "step": 3
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.41437870264053345,
      "learning_rate": 1.98e-05,
      "loss": 2.5479,
      "step": 4
    },
    {
      "epoch": 0.0125,
      "grad_norm": 0.4806264638900757,
      "learning_rate": 1.9750000000000002e-05,
      "loss": 2.6004,
      "step": 5
    },
    {
      "epoch": 0.015,
      "grad_norm": 0.5133494734764099,
      "learning_rate": 1.97e-05,
      "loss": 2.7935,
      "step": 6
    },
    {
      "epoch": 0.0175,
      "grad_norm": 0.4955611824989319,
      "learning_rate": 1.9650000000000003e-05,
      "loss": 2.5868,
      "step": 7
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.5297004580497742,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 2.8002,
      "step": 8
    },
    {
      "epoch": 0.0225,
      "grad_norm": 0.3629765510559082,
      "learning_rate": 1.955e-05,
      "loss": 2.6765,
      "step": 9
    },
    {
      "epoch": 0.025,
      "grad_norm": 0.40658852458000183,
      "learning_rate": 1.95e-05,
      "loss": 2.4437,
      "step": 10
    },
    {
      "epoch": 0.0275,
      "grad_norm": 0.7563017010688782,
      "learning_rate": 1.9450000000000002e-05,
      "loss": 2.7119,
      "step": 11
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.5557965636253357,
      "learning_rate": 1.94e-05,
      "loss": 3.0774,
      "step": 12
    },
    {
      "epoch": 0.0325,
      "grad_norm": 0.5341749787330627,
      "learning_rate": 1.9350000000000003e-05,
      "loss": 2.9765,
      "step": 13
    },
    {
      "epoch": 0.035,
      "grad_norm": 0.3685285449028015,
      "learning_rate": 1.93e-05,
      "loss": 2.7085,
      "step": 14
    },
    {
      "epoch": 0.0375,
      "grad_norm": 0.5424759387969971,
      "learning_rate": 1.925e-05,
      "loss": 2.6293,
      "step": 15
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.4518626630306244,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 2.4085,
      "step": 16
    },
    {
      "epoch": 0.0425,
      "grad_norm": 0.3510679006576538,
      "learning_rate": 1.915e-05,
      "loss": 2.3645,
      "step": 17
    },
    {
      "epoch": 0.045,
      "grad_norm": 0.5852112174034119,
      "learning_rate": 1.91e-05,
      "loss": 3.1263,
      "step": 18
    },
    {
      "epoch": 0.0475,
      "grad_norm": 0.9667254686355591,
      "learning_rate": 1.9050000000000002e-05,
      "loss": 3.5084,
      "step": 19
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.4582698941230774,
      "learning_rate": 1.9e-05,
      "loss": 3.012,
      "step": 20
    },
    {
      "epoch": 0.0525,
      "grad_norm": 0.6149416565895081,
      "learning_rate": 1.8950000000000003e-05,
      "loss": 2.7546,
      "step": 21
    },
    {
      "epoch": 0.055,
      "grad_norm": 0.4512835144996643,
      "learning_rate": 1.8900000000000002e-05,
      "loss": 2.5626,
      "step": 22
    },
    {
      "epoch": 0.0575,
      "grad_norm": 0.739122748374939,
      "learning_rate": 1.885e-05,
      "loss": 2.9673,
      "step": 23
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.6391552686691284,
      "learning_rate": 1.88e-05,
      "loss": 3.0665,
      "step": 24
    },
    {
      "epoch": 0.0625,
      "grad_norm": 0.5419601202011108,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 2.3289,
      "step": 25
    },
    {
      "epoch": 0.065,
      "grad_norm": 1.0033130645751953,
      "learning_rate": 1.8700000000000004e-05,
      "loss": 2.9624,
      "step": 26
    },
    {
      "epoch": 0.0675,
      "grad_norm": 0.6456469297409058,
      "learning_rate": 1.8650000000000003e-05,
      "loss": 2.3933,
      "step": 27
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.5043237209320068,
      "learning_rate": 1.86e-05,
      "loss": 2.8812,
      "step": 28
    },
    {
      "epoch": 0.0725,
      "grad_norm": 0.6468444466590881,
      "learning_rate": 1.855e-05,
      "loss": 2.7498,
      "step": 29
    },
    {
      "epoch": 0.075,
      "grad_norm": 0.7163925766944885,
      "learning_rate": 1.8500000000000002e-05,
      "loss": 2.9495,
      "step": 30
    },
    {
      "epoch": 0.0775,
      "grad_norm": 0.8370097875595093,
      "learning_rate": 1.845e-05,
      "loss": 2.8212,
      "step": 31
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.48631641268730164,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 2.379,
      "step": 32
    },
    {
      "epoch": 0.0825,
      "grad_norm": 1.7928434610366821,
      "learning_rate": 1.8350000000000002e-05,
      "loss": 3.1519,
      "step": 33
    },
    {
      "epoch": 0.085,
      "grad_norm": 0.6675847172737122,
      "learning_rate": 1.83e-05,
      "loss": 3.0783,
      "step": 34
    },
    {
      "epoch": 0.0875,
      "grad_norm": 0.7228886485099792,
      "learning_rate": 1.825e-05,
      "loss": 3.1486,
      "step": 35
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.744644820690155,
      "learning_rate": 1.8200000000000002e-05,
      "loss": 2.5763,
      "step": 36
    },
    {
      "epoch": 0.0925,
      "grad_norm": 2.355612277984619,
      "learning_rate": 1.815e-05,
      "loss": 3.3755,
      "step": 37
    },
    {
      "epoch": 0.095,
      "grad_norm": 0.9405584335327148,
      "learning_rate": 1.8100000000000003e-05,
      "loss": 3.1682,
      "step": 38
    },
    {
      "epoch": 0.0975,
      "grad_norm": 0.7231965661048889,
      "learning_rate": 1.805e-05,
      "loss": 2.6062,
      "step": 39
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.8962283730506897,
      "learning_rate": 1.8e-05,
      "loss": 3.2604,
      "step": 40
    },
    {
      "epoch": 0.1025,
      "grad_norm": 0.4068661630153656,
      "learning_rate": 1.795e-05,
      "loss": 2.4424,
      "step": 41
    },
    {
      "epoch": 0.105,
      "grad_norm": 0.8694533109664917,
      "learning_rate": 1.79e-05,
      "loss": 2.6876,
      "step": 42
    },
    {
      "epoch": 0.1075,
      "grad_norm": 0.624096691608429,
      "learning_rate": 1.785e-05,
      "loss": 2.3869,
      "step": 43
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.0791139602661133,
      "learning_rate": 1.7800000000000002e-05,
      "loss": 3.2583,
      "step": 44
    },
    {
      "epoch": 0.1125,
      "grad_norm": 0.711621880531311,
      "learning_rate": 1.775e-05,
      "loss": 2.7002,
      "step": 45
    },
    {
      "epoch": 0.115,
      "grad_norm": 0.46927714347839355,
      "learning_rate": 1.77e-05,
      "loss": 2.3851,
      "step": 46
    },
    {
      "epoch": 0.1175,
      "grad_norm": 0.7309519648551941,
      "learning_rate": 1.7650000000000002e-05,
      "loss": 2.3945,
      "step": 47
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.7145869135856628,
      "learning_rate": 1.76e-05,
      "loss": 2.4627,
      "step": 48
    },
    {
      "epoch": 0.1225,
      "grad_norm": 1.159073829650879,
      "learning_rate": 1.755e-05,
      "loss": 2.6211,
      "step": 49
    },
    {
      "epoch": 0.125,
      "grad_norm": 0.8457475304603577,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 2.7707,
      "step": 50
    },
    {
      "epoch": 0.1275,
      "grad_norm": 0.8868067264556885,
      "learning_rate": 1.7450000000000004e-05,
      "loss": 2.9123,
      "step": 51
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.7379621863365173,
      "learning_rate": 1.7400000000000003e-05,
      "loss": 2.7426,
      "step": 52
    },
    {
      "epoch": 0.1325,
      "grad_norm": 0.7611985802650452,
      "learning_rate": 1.735e-05,
      "loss": 2.7205,
      "step": 53
    },
    {
      "epoch": 0.135,
      "grad_norm": 0.4997944235801697,
      "learning_rate": 1.73e-05,
      "loss": 2.1109,
      "step": 54
    },
    {
      "epoch": 0.1375,
      "grad_norm": 0.7576261758804321,
      "learning_rate": 1.7250000000000003e-05,
      "loss": 2.9695,
      "step": 55
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.5478056073188782,
      "learning_rate": 1.72e-05,
      "loss": 2.6197,
      "step": 56
    },
    {
      "epoch": 0.1425,
      "grad_norm": 0.6078264713287354,
      "learning_rate": 1.7150000000000004e-05,
      "loss": 2.4308,
      "step": 57
    },
    {
      "epoch": 0.145,
      "grad_norm": 2.7619869709014893,
      "learning_rate": 1.7100000000000002e-05,
      "loss": 2.964,
      "step": 58
    },
    {
      "epoch": 0.1475,
      "grad_norm": 0.7455095052719116,
      "learning_rate": 1.705e-05,
      "loss": 2.9467,
      "step": 59
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.794147789478302,
      "learning_rate": 1.7e-05,
      "loss": 2.8496,
      "step": 60
    },
    {
      "epoch": 0.1525,
      "grad_norm": 1.1698209047317505,
      "learning_rate": 1.6950000000000002e-05,
      "loss": 3.213,
      "step": 61
    },
    {
      "epoch": 0.155,
      "grad_norm": 0.425815612077713,
      "learning_rate": 1.69e-05,
      "loss": 2.5624,
      "step": 62
    },
    {
      "epoch": 0.1575,
      "grad_norm": 0.5705508589744568,
      "learning_rate": 1.6850000000000003e-05,
      "loss": 1.9561,
      "step": 63
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.791164755821228,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 2.8788,
      "step": 64
    },
    {
      "epoch": 0.1625,
      "grad_norm": 0.539487361907959,
      "learning_rate": 1.675e-05,
      "loss": 2.9631,
      "step": 65
    },
    {
      "epoch": 0.165,
      "grad_norm": 0.6165200471878052,
      "learning_rate": 1.67e-05,
      "loss": 2.731,
      "step": 66
    },
    {
      "epoch": 0.1675,
      "grad_norm": 0.4418243169784546,
      "learning_rate": 1.665e-05,
      "loss": 2.7103,
      "step": 67
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.0445636510849,
      "learning_rate": 1.66e-05,
      "loss": 3.3579,
      "step": 68
    },
    {
      "epoch": 0.1725,
      "grad_norm": 1.2330797910690308,
      "learning_rate": 1.6550000000000002e-05,
      "loss": 2.9892,
      "step": 69
    },
    {
      "epoch": 0.175,
      "grad_norm": 0.6254209876060486,
      "learning_rate": 1.65e-05,
      "loss": 2.5447,
      "step": 70
    },
    {
      "epoch": 0.1775,
      "grad_norm": 1.1451466083526611,
      "learning_rate": 1.645e-05,
      "loss": 2.9316,
      "step": 71
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.9813737869262695,
      "learning_rate": 1.64e-05,
      "loss": 2.6544,
      "step": 72
    },
    {
      "epoch": 0.1825,
      "grad_norm": 0.6040698885917664,
      "learning_rate": 1.635e-05,
      "loss": 2.9732,
      "step": 73
    },
    {
      "epoch": 0.185,
      "grad_norm": 1.079363465309143,
      "learning_rate": 1.63e-05,
      "loss": 2.7277,
      "step": 74
    },
    {
      "epoch": 0.1875,
      "grad_norm": 1.183262586593628,
      "learning_rate": 1.6250000000000002e-05,
      "loss": 3.4225,
      "step": 75
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.024534821510315,
      "learning_rate": 1.62e-05,
      "loss": 2.6733,
      "step": 76
    },
    {
      "epoch": 0.1925,
      "grad_norm": 0.7848602533340454,
      "learning_rate": 1.6150000000000003e-05,
      "loss": 2.2153,
      "step": 77
    },
    {
      "epoch": 0.195,
      "grad_norm": 0.9561752676963806,
      "learning_rate": 1.6100000000000002e-05,
      "loss": 2.868,
      "step": 78
    },
    {
      "epoch": 0.1975,
      "grad_norm": 0.7831237316131592,
      "learning_rate": 1.605e-05,
      "loss": 3.3643,
      "step": 79
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.0299983024597168,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 2.8384,
      "step": 80
    },
    {
      "epoch": 0.2025,
      "grad_norm": 0.7254953384399414,
      "learning_rate": 1.595e-05,
      "loss": 2.3374,
      "step": 81
    },
    {
      "epoch": 0.205,
      "grad_norm": 1.1295499801635742,
      "learning_rate": 1.5900000000000004e-05,
      "loss": 2.7828,
      "step": 82
    },
    {
      "epoch": 0.2075,
      "grad_norm": 1.5047646760940552,
      "learning_rate": 1.5850000000000002e-05,
      "loss": 3.3818,
      "step": 83
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.1679288148880005,
      "learning_rate": 1.58e-05,
      "loss": 2.8351,
      "step": 84
    },
    {
      "epoch": 0.2125,
      "grad_norm": 1.0215470790863037,
      "learning_rate": 1.575e-05,
      "loss": 2.8807,
      "step": 85
    },
    {
      "epoch": 0.215,
      "grad_norm": 0.8660721778869629,
      "learning_rate": 1.5700000000000002e-05,
      "loss": 2.9657,
      "step": 86
    },
    {
      "epoch": 0.2175,
      "grad_norm": 1.3509225845336914,
      "learning_rate": 1.565e-05,
      "loss": 3.2028,
      "step": 87
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.8548119068145752,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 2.7189,
      "step": 88
    },
    {
      "epoch": 0.2225,
      "grad_norm": 0.6334153413772583,
      "learning_rate": 1.5550000000000002e-05,
      "loss": 2.7275,
      "step": 89
    },
    {
      "epoch": 0.225,
      "grad_norm": 0.9550497531890869,
      "learning_rate": 1.55e-05,
      "loss": 2.6446,
      "step": 90
    },
    {
      "epoch": 0.2275,
      "grad_norm": 1.1148252487182617,
      "learning_rate": 1.545e-05,
      "loss": 2.5969,
      "step": 91
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.5404387712478638,
      "learning_rate": 1.54e-05,
      "loss": 2.9074,
      "step": 92
    },
    {
      "epoch": 0.2325,
      "grad_norm": 0.5893303155899048,
      "learning_rate": 1.535e-05,
      "loss": 2.4446,
      "step": 93
    },
    {
      "epoch": 0.235,
      "grad_norm": 0.8293983936309814,
      "learning_rate": 1.5300000000000003e-05,
      "loss": 2.881,
      "step": 94
    },
    {
      "epoch": 0.2375,
      "grad_norm": 0.647366464138031,
      "learning_rate": 1.525e-05,
      "loss": 2.303,
      "step": 95
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.5540935397148132,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 2.5093,
      "step": 96
    },
    {
      "epoch": 0.2425,
      "grad_norm": 0.8777411580085754,
      "learning_rate": 1.515e-05,
      "loss": 2.9024,
      "step": 97
    },
    {
      "epoch": 0.245,
      "grad_norm": 1.4827839136123657,
      "learning_rate": 1.5100000000000001e-05,
      "loss": 3.5164,
      "step": 98
    },
    {
      "epoch": 0.2475,
      "grad_norm": 1.0348917245864868,
      "learning_rate": 1.505e-05,
      "loss": 3.2313,
      "step": 99
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.6850854754447937,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 2.4828,
      "step": 100
    },
    {
      "epoch": 0.2525,
      "grad_norm": 1.2222882509231567,
      "learning_rate": 1.4950000000000003e-05,
      "loss": 2.63,
      "step": 101
    },
    {
      "epoch": 0.255,
      "grad_norm": 1.0515038967132568,
      "learning_rate": 1.4900000000000001e-05,
      "loss": 3.3098,
      "step": 102
    },
    {
      "epoch": 0.2575,
      "grad_norm": 1.3674108982086182,
      "learning_rate": 1.4850000000000002e-05,
      "loss": 2.7831,
      "step": 103
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.6067950129508972,
      "learning_rate": 1.48e-05,
      "loss": 2.4279,
      "step": 104
    },
    {
      "epoch": 0.2625,
      "grad_norm": 1.3909555673599243,
      "learning_rate": 1.4750000000000003e-05,
      "loss": 2.6093,
      "step": 105
    },
    {
      "epoch": 0.265,
      "grad_norm": 0.7833155393600464,
      "learning_rate": 1.4700000000000002e-05,
      "loss": 2.4604,
      "step": 106
    },
    {
      "epoch": 0.2675,
      "grad_norm": 0.9903278946876526,
      "learning_rate": 1.4650000000000002e-05,
      "loss": 2.886,
      "step": 107
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.8194913268089294,
      "learning_rate": 1.46e-05,
      "loss": 2.7935,
      "step": 108
    },
    {
      "epoch": 0.2725,
      "grad_norm": 1.1634082794189453,
      "learning_rate": 1.4550000000000001e-05,
      "loss": 3.3633,
      "step": 109
    },
    {
      "epoch": 0.275,
      "grad_norm": 0.5949274897575378,
      "learning_rate": 1.45e-05,
      "loss": 2.2084,
      "step": 110
    },
    {
      "epoch": 0.2775,
      "grad_norm": 0.8783496618270874,
      "learning_rate": 1.4450000000000002e-05,
      "loss": 3.1123,
      "step": 111
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.2037030458450317,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 3.0479,
      "step": 112
    },
    {
      "epoch": 0.2825,
      "grad_norm": 0.7056751847267151,
      "learning_rate": 1.4350000000000002e-05,
      "loss": 2.2833,
      "step": 113
    },
    {
      "epoch": 0.285,
      "grad_norm": 1.687018871307373,
      "learning_rate": 1.43e-05,
      "loss": 3.0165,
      "step": 114
    },
    {
      "epoch": 0.2875,
      "grad_norm": 1.1513874530792236,
      "learning_rate": 1.425e-05,
      "loss": 2.8763,
      "step": 115
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.7570899128913879,
      "learning_rate": 1.4200000000000001e-05,
      "loss": 2.7981,
      "step": 116
    },
    {
      "epoch": 0.2925,
      "grad_norm": 1.3397407531738281,
      "learning_rate": 1.4150000000000002e-05,
      "loss": 2.7536,
      "step": 117
    },
    {
      "epoch": 0.295,
      "grad_norm": 0.710766613483429,
      "learning_rate": 1.41e-05,
      "loss": 2.2904,
      "step": 118
    },
    {
      "epoch": 0.2975,
      "grad_norm": 1.6919655799865723,
      "learning_rate": 1.4050000000000001e-05,
      "loss": 3.1436,
      "step": 119
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.8802596926689148,
      "learning_rate": 1.4e-05,
      "loss": 2.969,
      "step": 120
    },
    {
      "epoch": 0.3025,
      "grad_norm": 1.3881422281265259,
      "learning_rate": 1.3950000000000002e-05,
      "loss": 2.7345,
      "step": 121
    },
    {
      "epoch": 0.305,
      "grad_norm": 1.0002195835113525,
      "learning_rate": 1.39e-05,
      "loss": 2.5324,
      "step": 122
    },
    {
      "epoch": 0.3075,
      "grad_norm": 0.7784033417701721,
      "learning_rate": 1.3850000000000001e-05,
      "loss": 2.4744,
      "step": 123
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.422712802886963,
      "learning_rate": 1.38e-05,
      "loss": 3.0338,
      "step": 124
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.775004506111145,
      "learning_rate": 1.375e-05,
      "loss": 2.5653,
      "step": 125
    },
    {
      "epoch": 0.315,
      "grad_norm": 0.8815636038780212,
      "learning_rate": 1.3700000000000003e-05,
      "loss": 2.7117,
      "step": 126
    },
    {
      "epoch": 0.3175,
      "grad_norm": 0.7529321312904358,
      "learning_rate": 1.3650000000000001e-05,
      "loss": 2.2918,
      "step": 127
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.1694319248199463,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 2.7757,
      "step": 128
    },
    {
      "epoch": 0.3225,
      "grad_norm": 1.25980544090271,
      "learning_rate": 1.355e-05,
      "loss": 2.8813,
      "step": 129
    },
    {
      "epoch": 0.325,
      "grad_norm": 0.8933326005935669,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 2.6694,
      "step": 130
    },
    {
      "epoch": 0.3275,
      "grad_norm": 1.1248258352279663,
      "learning_rate": 1.3450000000000002e-05,
      "loss": 2.9221,
      "step": 131
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1657884120941162,
      "learning_rate": 1.3400000000000002e-05,
      "loss": 2.7624,
      "step": 132
    },
    {
      "epoch": 0.3325,
      "grad_norm": 0.6563082337379456,
      "learning_rate": 1.3350000000000001e-05,
      "loss": 2.6828,
      "step": 133
    },
    {
      "epoch": 0.335,
      "grad_norm": 1.9324404001235962,
      "learning_rate": 1.3300000000000001e-05,
      "loss": 3.3839,
      "step": 134
    },
    {
      "epoch": 0.3375,
      "grad_norm": 1.4225391149520874,
      "learning_rate": 1.325e-05,
      "loss": 3.0225,
      "step": 135
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.3360658884048462,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 2.8966,
      "step": 136
    },
    {
      "epoch": 0.3425,
      "grad_norm": 0.8045865297317505,
      "learning_rate": 1.3150000000000001e-05,
      "loss": 2.7465,
      "step": 137
    },
    {
      "epoch": 0.345,
      "grad_norm": 0.9894474744796753,
      "learning_rate": 1.3100000000000002e-05,
      "loss": 2.0347,
      "step": 138
    },
    {
      "epoch": 0.3475,
      "grad_norm": 1.2224481105804443,
      "learning_rate": 1.305e-05,
      "loss": 2.5152,
      "step": 139
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.8339478373527527,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 2.8098,
      "step": 140
    },
    {
      "epoch": 0.3525,
      "grad_norm": 0.8256344795227051,
      "learning_rate": 1.295e-05,
      "loss": 2.6257,
      "step": 141
    },
    {
      "epoch": 0.355,
      "grad_norm": 1.3551095724105835,
      "learning_rate": 1.2900000000000002e-05,
      "loss": 3.1719,
      "step": 142
    },
    {
      "epoch": 0.3575,
      "grad_norm": 0.7657787203788757,
      "learning_rate": 1.285e-05,
      "loss": 2.4577,
      "step": 143
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8604536652565002,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 2.8134,
      "step": 144
    },
    {
      "epoch": 0.3625,
      "grad_norm": 1.6100467443466187,
      "learning_rate": 1.275e-05,
      "loss": 2.9813,
      "step": 145
    },
    {
      "epoch": 0.365,
      "grad_norm": 1.0728399753570557,
      "learning_rate": 1.27e-05,
      "loss": 2.9336,
      "step": 146
    },
    {
      "epoch": 0.3675,
      "grad_norm": 0.7545064687728882,
      "learning_rate": 1.2650000000000001e-05,
      "loss": 2.4588,
      "step": 147
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.1295570135116577,
      "learning_rate": 1.2600000000000001e-05,
      "loss": 2.1661,
      "step": 148
    },
    {
      "epoch": 0.3725,
      "grad_norm": 1.0526827573776245,
      "learning_rate": 1.255e-05,
      "loss": 2.6463,
      "step": 149
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.834685742855072,
      "learning_rate": 1.25e-05,
      "loss": 2.3916,
      "step": 150
    },
    {
      "epoch": 0.3775,
      "grad_norm": 0.6503292918205261,
      "learning_rate": 1.2450000000000003e-05,
      "loss": 2.3776,
      "step": 151
    },
    {
      "epoch": 0.38,
      "grad_norm": 1.1130701303482056,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 2.1929,
      "step": 152
    },
    {
      "epoch": 0.3825,
      "grad_norm": 1.4437247514724731,
      "learning_rate": 1.2350000000000002e-05,
      "loss": 3.055,
      "step": 153
    },
    {
      "epoch": 0.385,
      "grad_norm": 1.0600138902664185,
      "learning_rate": 1.23e-05,
      "loss": 2.7314,
      "step": 154
    },
    {
      "epoch": 0.3875,
      "grad_norm": 0.8913452625274658,
      "learning_rate": 1.2250000000000001e-05,
      "loss": 2.519,
      "step": 155
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.1115096807479858,
      "learning_rate": 1.22e-05,
      "loss": 2.2607,
      "step": 156
    },
    {
      "epoch": 0.3925,
      "grad_norm": 1.2785054445266724,
      "learning_rate": 1.2150000000000002e-05,
      "loss": 3.0773,
      "step": 157
    },
    {
      "epoch": 0.395,
      "grad_norm": 1.3286359310150146,
      "learning_rate": 1.2100000000000001e-05,
      "loss": 2.9653,
      "step": 158
    },
    {
      "epoch": 0.3975,
      "grad_norm": 1.0843842029571533,
      "learning_rate": 1.2050000000000002e-05,
      "loss": 2.5038,
      "step": 159
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.6999257206916809,
      "learning_rate": 1.2e-05,
      "loss": 2.2963,
      "step": 160
    },
    {
      "epoch": 0.4025,
      "grad_norm": 1.3845021724700928,
      "learning_rate": 1.195e-05,
      "loss": 2.0538,
      "step": 161
    },
    {
      "epoch": 0.405,
      "grad_norm": 0.8387379050254822,
      "learning_rate": 1.1900000000000001e-05,
      "loss": 2.6954,
      "step": 162
    },
    {
      "epoch": 0.4075,
      "grad_norm": 1.5070502758026123,
      "learning_rate": 1.1850000000000002e-05,
      "loss": 3.4584,
      "step": 163
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.3833943605422974,
      "learning_rate": 1.18e-05,
      "loss": 2.603,
      "step": 164
    },
    {
      "epoch": 0.4125,
      "grad_norm": 1.2640782594680786,
      "learning_rate": 1.1750000000000001e-05,
      "loss": 2.6054,
      "step": 165
    },
    {
      "epoch": 0.415,
      "grad_norm": 1.2189885377883911,
      "learning_rate": 1.17e-05,
      "loss": 2.5489,
      "step": 166
    },
    {
      "epoch": 0.4175,
      "grad_norm": 0.9442604184150696,
      "learning_rate": 1.1650000000000002e-05,
      "loss": 2.9024,
      "step": 167
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.966208815574646,
      "learning_rate": 1.16e-05,
      "loss": 2.6491,
      "step": 168
    },
    {
      "epoch": 0.4225,
      "grad_norm": 1.126082181930542,
      "learning_rate": 1.1550000000000001e-05,
      "loss": 2.789,
      "step": 169
    },
    {
      "epoch": 0.425,
      "grad_norm": 0.9639744162559509,
      "learning_rate": 1.15e-05,
      "loss": 2.9103,
      "step": 170
    },
    {
      "epoch": 0.4275,
      "grad_norm": 1.8708701133728027,
      "learning_rate": 1.145e-05,
      "loss": 3.8013,
      "step": 171
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.8368299603462219,
      "learning_rate": 1.14e-05,
      "loss": 2.8514,
      "step": 172
    },
    {
      "epoch": 0.4325,
      "grad_norm": 0.9027671813964844,
      "learning_rate": 1.1350000000000001e-05,
      "loss": 2.8475,
      "step": 173
    },
    {
      "epoch": 0.435,
      "grad_norm": 0.7692644596099854,
      "learning_rate": 1.13e-05,
      "loss": 2.4503,
      "step": 174
    },
    {
      "epoch": 0.4375,
      "grad_norm": 0.9505887627601624,
      "learning_rate": 1.125e-05,
      "loss": 2.3865,
      "step": 175
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.6154547333717346,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 2.2003,
      "step": 176
    },
    {
      "epoch": 0.4425,
      "grad_norm": 0.9452154040336609,
      "learning_rate": 1.1150000000000002e-05,
      "loss": 2.5568,
      "step": 177
    },
    {
      "epoch": 0.445,
      "grad_norm": 0.932019054889679,
      "learning_rate": 1.1100000000000002e-05,
      "loss": 2.7934,
      "step": 178
    },
    {
      "epoch": 0.4475,
      "grad_norm": 0.8905855417251587,
      "learning_rate": 1.1050000000000001e-05,
      "loss": 2.5882,
      "step": 179
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.787998378276825,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 2.3595,
      "step": 180
    },
    {
      "epoch": 0.4525,
      "grad_norm": 1.048705816268921,
      "learning_rate": 1.095e-05,
      "loss": 2.344,
      "step": 181
    },
    {
      "epoch": 0.455,
      "grad_norm": 1.480038046836853,
      "learning_rate": 1.0900000000000002e-05,
      "loss": 2.6574,
      "step": 182
    },
    {
      "epoch": 0.4575,
      "grad_norm": 1.0186829566955566,
      "learning_rate": 1.0850000000000001e-05,
      "loss": 2.5634,
      "step": 183
    },
    {
      "epoch": 0.46,
      "grad_norm": 1.0724053382873535,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 2.3537,
      "step": 184
    },
    {
      "epoch": 0.4625,
      "grad_norm": 1.1065270900726318,
      "learning_rate": 1.075e-05,
      "loss": 2.3363,
      "step": 185
    },
    {
      "epoch": 0.465,
      "grad_norm": 1.0963391065597534,
      "learning_rate": 1.0700000000000001e-05,
      "loss": 2.8301,
      "step": 186
    },
    {
      "epoch": 0.4675,
      "grad_norm": 1.0181528329849243,
      "learning_rate": 1.065e-05,
      "loss": 2.3469,
      "step": 187
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.9789475202560425,
      "learning_rate": 1.0600000000000002e-05,
      "loss": 2.4968,
      "step": 188
    },
    {
      "epoch": 0.4725,
      "grad_norm": 0.9361099600791931,
      "learning_rate": 1.055e-05,
      "loss": 2.7079,
      "step": 189
    },
    {
      "epoch": 0.475,
      "grad_norm": 1.1060682535171509,
      "learning_rate": 1.0500000000000001e-05,
      "loss": 2.6673,
      "step": 190
    },
    {
      "epoch": 0.4775,
      "grad_norm": 0.7775200605392456,
      "learning_rate": 1.045e-05,
      "loss": 2.4824,
      "step": 191
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.3691951036453247,
      "learning_rate": 1.04e-05,
      "loss": 2.8986,
      "step": 192
    },
    {
      "epoch": 0.4825,
      "grad_norm": 1.13148033618927,
      "learning_rate": 1.0350000000000001e-05,
      "loss": 2.524,
      "step": 193
    },
    {
      "epoch": 0.485,
      "grad_norm": 1.0336346626281738,
      "learning_rate": 1.0300000000000001e-05,
      "loss": 2.5613,
      "step": 194
    },
    {
      "epoch": 0.4875,
      "grad_norm": 1.716963768005371,
      "learning_rate": 1.025e-05,
      "loss": 3.1095,
      "step": 195
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.8384996056556702,
      "learning_rate": 1.02e-05,
      "loss": 3.0394,
      "step": 196
    },
    {
      "epoch": 0.4925,
      "grad_norm": 1.4971530437469482,
      "learning_rate": 1.015e-05,
      "loss": 3.0766,
      "step": 197
    },
    {
      "epoch": 0.495,
      "grad_norm": 1.282583236694336,
      "learning_rate": 1.0100000000000002e-05,
      "loss": 2.016,
      "step": 198
    },
    {
      "epoch": 0.4975,
      "grad_norm": 0.7084432244300842,
      "learning_rate": 1.005e-05,
      "loss": 2.4533,
      "step": 199
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.0756547451019287,
      "learning_rate": 1e-05,
      "loss": 2.8562,
      "step": 200
    },
    {
      "epoch": 0.5025,
      "grad_norm": 1.582619071006775,
      "learning_rate": 9.950000000000001e-06,
      "loss": 3.0876,
      "step": 201
    },
    {
      "epoch": 0.505,
      "grad_norm": 1.0258393287658691,
      "learning_rate": 9.9e-06,
      "loss": 2.4419,
      "step": 202
    },
    {
      "epoch": 0.5075,
      "grad_norm": 1.2243763208389282,
      "learning_rate": 9.85e-06,
      "loss": 2.6982,
      "step": 203
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.9121052622795105,
      "learning_rate": 9.800000000000001e-06,
      "loss": 2.7505,
      "step": 204
    },
    {
      "epoch": 0.5125,
      "grad_norm": 1.0946589708328247,
      "learning_rate": 9.75e-06,
      "loss": 2.5225,
      "step": 205
    },
    {
      "epoch": 0.515,
      "grad_norm": 1.5404136180877686,
      "learning_rate": 9.7e-06,
      "loss": 2.7569,
      "step": 206
    },
    {
      "epoch": 0.5175,
      "grad_norm": 2.3309805393218994,
      "learning_rate": 9.65e-06,
      "loss": 3.2281,
      "step": 207
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.3618017435073853,
      "learning_rate": 9.600000000000001e-06,
      "loss": 2.4756,
      "step": 208
    },
    {
      "epoch": 0.5225,
      "grad_norm": 1.309582233428955,
      "learning_rate": 9.55e-06,
      "loss": 2.9163,
      "step": 209
    },
    {
      "epoch": 0.525,
      "grad_norm": 1.01054048538208,
      "learning_rate": 9.5e-06,
      "loss": 2.5883,
      "step": 210
    },
    {
      "epoch": 0.5275,
      "grad_norm": 1.077317476272583,
      "learning_rate": 9.450000000000001e-06,
      "loss": 2.7063,
      "step": 211
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.1660407781600952,
      "learning_rate": 9.4e-06,
      "loss": 2.2638,
      "step": 212
    },
    {
      "epoch": 0.5325,
      "grad_norm": 1.070369839668274,
      "learning_rate": 9.350000000000002e-06,
      "loss": 2.5768,
      "step": 213
    },
    {
      "epoch": 0.535,
      "grad_norm": 1.653826355934143,
      "learning_rate": 9.3e-06,
      "loss": 3.2656,
      "step": 214
    },
    {
      "epoch": 0.5375,
      "grad_norm": 1.7478209733963013,
      "learning_rate": 9.250000000000001e-06,
      "loss": 3.1918,
      "step": 215
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.5557425022125244,
      "learning_rate": 9.200000000000002e-06,
      "loss": 3.317,
      "step": 216
    },
    {
      "epoch": 0.5425,
      "grad_norm": 0.7344517707824707,
      "learning_rate": 9.15e-06,
      "loss": 2.444,
      "step": 217
    },
    {
      "epoch": 0.545,
      "grad_norm": 1.3143254518508911,
      "learning_rate": 9.100000000000001e-06,
      "loss": 2.5553,
      "step": 218
    },
    {
      "epoch": 0.5475,
      "grad_norm": 0.7797951698303223,
      "learning_rate": 9.050000000000001e-06,
      "loss": 2.1852,
      "step": 219
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.4703871011734009,
      "learning_rate": 9e-06,
      "loss": 2.4204,
      "step": 220
    },
    {
      "epoch": 0.5525,
      "grad_norm": 1.1021125316619873,
      "learning_rate": 8.95e-06,
      "loss": 3.0355,
      "step": 221
    },
    {
      "epoch": 0.555,
      "grad_norm": 1.8875423669815063,
      "learning_rate": 8.900000000000001e-06,
      "loss": 2.5834,
      "step": 222
    },
    {
      "epoch": 0.5575,
      "grad_norm": 1.5867810249328613,
      "learning_rate": 8.85e-06,
      "loss": 2.9359,
      "step": 223
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.0237711668014526,
      "learning_rate": 8.8e-06,
      "loss": 2.7608,
      "step": 224
    },
    {
      "epoch": 0.5625,
      "grad_norm": 0.7836809754371643,
      "learning_rate": 8.750000000000001e-06,
      "loss": 2.5303,
      "step": 225
    },
    {
      "epoch": 0.565,
      "grad_norm": 0.8862155079841614,
      "learning_rate": 8.700000000000001e-06,
      "loss": 2.7468,
      "step": 226
    },
    {
      "epoch": 0.5675,
      "grad_norm": 1.3247358798980713,
      "learning_rate": 8.65e-06,
      "loss": 2.501,
      "step": 227
    },
    {
      "epoch": 0.57,
      "grad_norm": 1.2541866302490234,
      "learning_rate": 8.6e-06,
      "loss": 2.5587,
      "step": 228
    },
    {
      "epoch": 0.5725,
      "grad_norm": 0.7479645013809204,
      "learning_rate": 8.550000000000001e-06,
      "loss": 2.2071,
      "step": 229
    },
    {
      "epoch": 0.575,
      "grad_norm": 1.4009301662445068,
      "learning_rate": 8.5e-06,
      "loss": 2.9907,
      "step": 230
    },
    {
      "epoch": 0.5775,
      "grad_norm": 1.2885963916778564,
      "learning_rate": 8.45e-06,
      "loss": 2.4376,
      "step": 231
    },
    {
      "epoch": 0.58,
      "grad_norm": 1.4628698825836182,
      "learning_rate": 8.400000000000001e-06,
      "loss": 2.378,
      "step": 232
    },
    {
      "epoch": 0.5825,
      "grad_norm": 1.5401051044464111,
      "learning_rate": 8.35e-06,
      "loss": 2.4052,
      "step": 233
    },
    {
      "epoch": 0.585,
      "grad_norm": 1.3357149362564087,
      "learning_rate": 8.3e-06,
      "loss": 2.1,
      "step": 234
    },
    {
      "epoch": 0.5875,
      "grad_norm": 0.828890323638916,
      "learning_rate": 8.25e-06,
      "loss": 2.1579,
      "step": 235
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.4550373554229736,
      "learning_rate": 8.2e-06,
      "loss": 3.0067,
      "step": 236
    },
    {
      "epoch": 0.5925,
      "grad_norm": 1.0677258968353271,
      "learning_rate": 8.15e-06,
      "loss": 2.7238,
      "step": 237
    },
    {
      "epoch": 0.595,
      "grad_norm": 0.9987130761146545,
      "learning_rate": 8.1e-06,
      "loss": 2.6472,
      "step": 238
    },
    {
      "epoch": 0.5975,
      "grad_norm": 1.0116266012191772,
      "learning_rate": 8.050000000000001e-06,
      "loss": 2.806,
      "step": 239
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.281691312789917,
      "learning_rate": 8.000000000000001e-06,
      "loss": 2.7574,
      "step": 240
    },
    {
      "epoch": 0.6025,
      "grad_norm": 1.1807749271392822,
      "learning_rate": 7.950000000000002e-06,
      "loss": 2.2562,
      "step": 241
    },
    {
      "epoch": 0.605,
      "grad_norm": 0.8675217628479004,
      "learning_rate": 7.9e-06,
      "loss": 2.5751,
      "step": 242
    },
    {
      "epoch": 0.6075,
      "grad_norm": 1.2328344583511353,
      "learning_rate": 7.850000000000001e-06,
      "loss": 2.753,
      "step": 243
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.0627636909484863,
      "learning_rate": 7.800000000000002e-06,
      "loss": 2.7131,
      "step": 244
    },
    {
      "epoch": 0.6125,
      "grad_norm": 1.6751055717468262,
      "learning_rate": 7.75e-06,
      "loss": 3.2299,
      "step": 245
    },
    {
      "epoch": 0.615,
      "grad_norm": 0.7668905258178711,
      "learning_rate": 7.7e-06,
      "loss": 2.7974,
      "step": 246
    },
    {
      "epoch": 0.6175,
      "grad_norm": 1.6743037700653076,
      "learning_rate": 7.650000000000001e-06,
      "loss": 2.2918,
      "step": 247
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.9343725442886353,
      "learning_rate": 7.600000000000001e-06,
      "loss": 2.9281,
      "step": 248
    },
    {
      "epoch": 0.6225,
      "grad_norm": 0.9057416319847107,
      "learning_rate": 7.5500000000000006e-06,
      "loss": 2.8679,
      "step": 249
    },
    {
      "epoch": 0.625,
      "grad_norm": 1.1523922681808472,
      "learning_rate": 7.500000000000001e-06,
      "loss": 2.5185,
      "step": 250
    },
    {
      "epoch": 0.6275,
      "grad_norm": 1.1341513395309448,
      "learning_rate": 7.450000000000001e-06,
      "loss": 2.0536,
      "step": 251
    },
    {
      "epoch": 0.63,
      "grad_norm": 2.606443405151367,
      "learning_rate": 7.4e-06,
      "loss": 3.3494,
      "step": 252
    },
    {
      "epoch": 0.6325,
      "grad_norm": 0.7551514506340027,
      "learning_rate": 7.350000000000001e-06,
      "loss": 2.4611,
      "step": 253
    },
    {
      "epoch": 0.635,
      "grad_norm": 1.4550762176513672,
      "learning_rate": 7.3e-06,
      "loss": 2.6206,
      "step": 254
    },
    {
      "epoch": 0.6375,
      "grad_norm": 1.5378979444503784,
      "learning_rate": 7.25e-06,
      "loss": 2.9399,
      "step": 255
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.8729074001312256,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 2.8517,
      "step": 256
    },
    {
      "epoch": 0.6425,
      "grad_norm": 0.8619707822799683,
      "learning_rate": 7.15e-06,
      "loss": 2.7855,
      "step": 257
    },
    {
      "epoch": 0.645,
      "grad_norm": 1.3895457983016968,
      "learning_rate": 7.100000000000001e-06,
      "loss": 2.8384,
      "step": 258
    },
    {
      "epoch": 0.6475,
      "grad_norm": 1.3270301818847656,
      "learning_rate": 7.05e-06,
      "loss": 2.421,
      "step": 259
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.5464551448822021,
      "learning_rate": 7e-06,
      "loss": 2.7851,
      "step": 260
    },
    {
      "epoch": 0.6525,
      "grad_norm": 0.8114046454429626,
      "learning_rate": 6.95e-06,
      "loss": 2.3022,
      "step": 261
    },
    {
      "epoch": 0.655,
      "grad_norm": 1.6738479137420654,
      "learning_rate": 6.9e-06,
      "loss": 2.8703,
      "step": 262
    },
    {
      "epoch": 0.6575,
      "grad_norm": 1.7005109786987305,
      "learning_rate": 6.850000000000001e-06,
      "loss": 2.7839,
      "step": 263
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.8630906939506531,
      "learning_rate": 6.800000000000001e-06,
      "loss": 2.9775,
      "step": 264
    },
    {
      "epoch": 0.6625,
      "grad_norm": 1.465364694595337,
      "learning_rate": 6.750000000000001e-06,
      "loss": 2.7382,
      "step": 265
    },
    {
      "epoch": 0.665,
      "grad_norm": 1.5297225713729858,
      "learning_rate": 6.700000000000001e-06,
      "loss": 2.4168,
      "step": 266
    },
    {
      "epoch": 0.6675,
      "grad_norm": 2.0528006553649902,
      "learning_rate": 6.650000000000001e-06,
      "loss": 3.1659,
      "step": 267
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.2365155220031738,
      "learning_rate": 6.600000000000001e-06,
      "loss": 2.2999,
      "step": 268
    },
    {
      "epoch": 0.6725,
      "grad_norm": 1.0112578868865967,
      "learning_rate": 6.550000000000001e-06,
      "loss": 2.4545,
      "step": 269
    },
    {
      "epoch": 0.675,
      "grad_norm": 1.8111435174942017,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 2.4034,
      "step": 270
    },
    {
      "epoch": 0.6775,
      "grad_norm": 1.0683218240737915,
      "learning_rate": 6.450000000000001e-06,
      "loss": 2.5293,
      "step": 271
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.8343501687049866,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 2.2964,
      "step": 272
    },
    {
      "epoch": 0.6825,
      "grad_norm": 0.7243903279304504,
      "learning_rate": 6.35e-06,
      "loss": 2.7022,
      "step": 273
    },
    {
      "epoch": 0.685,
      "grad_norm": 0.7344633936882019,
      "learning_rate": 6.300000000000001e-06,
      "loss": 2.657,
      "step": 274
    },
    {
      "epoch": 0.6875,
      "grad_norm": 0.8676360249519348,
      "learning_rate": 6.25e-06,
      "loss": 2.401,
      "step": 275
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.8793882131576538,
      "learning_rate": 6.200000000000001e-06,
      "loss": 2.4567,
      "step": 276
    },
    {
      "epoch": 0.6925,
      "grad_norm": 1.3876687288284302,
      "learning_rate": 6.15e-06,
      "loss": 2.4355,
      "step": 277
    },
    {
      "epoch": 0.695,
      "grad_norm": 1.6055662631988525,
      "learning_rate": 6.1e-06,
      "loss": 2.813,
      "step": 278
    },
    {
      "epoch": 0.6975,
      "grad_norm": 1.2520968914031982,
      "learning_rate": 6.0500000000000005e-06,
      "loss": 2.9245,
      "step": 279
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.8885823488235474,
      "learning_rate": 6e-06,
      "loss": 2.6111,
      "step": 280
    },
    {
      "epoch": 0.7025,
      "grad_norm": 1.6512576341629028,
      "learning_rate": 5.950000000000001e-06,
      "loss": 2.9193,
      "step": 281
    },
    {
      "epoch": 0.705,
      "grad_norm": 0.7407610416412354,
      "learning_rate": 5.9e-06,
      "loss": 2.1398,
      "step": 282
    },
    {
      "epoch": 0.7075,
      "grad_norm": 2.1687231063842773,
      "learning_rate": 5.85e-06,
      "loss": 2.9846,
      "step": 283
    },
    {
      "epoch": 0.71,
      "grad_norm": 2.4894940853118896,
      "learning_rate": 5.8e-06,
      "loss": 3.2657,
      "step": 284
    },
    {
      "epoch": 0.7125,
      "grad_norm": 0.9962525367736816,
      "learning_rate": 5.75e-06,
      "loss": 2.5076,
      "step": 285
    },
    {
      "epoch": 0.715,
      "grad_norm": 3.0593910217285156,
      "learning_rate": 5.7e-06,
      "loss": 3.4797,
      "step": 286
    },
    {
      "epoch": 0.7175,
      "grad_norm": 1.8267052173614502,
      "learning_rate": 5.65e-06,
      "loss": 3.0976,
      "step": 287
    },
    {
      "epoch": 0.72,
      "grad_norm": 1.4683566093444824,
      "learning_rate": 5.600000000000001e-06,
      "loss": 2.8335,
      "step": 288
    },
    {
      "epoch": 0.7225,
      "grad_norm": 1.1253299713134766,
      "learning_rate": 5.550000000000001e-06,
      "loss": 2.6241,
      "step": 289
    },
    {
      "epoch": 0.725,
      "grad_norm": 0.9860060214996338,
      "learning_rate": 5.500000000000001e-06,
      "loss": 2.4915,
      "step": 290
    },
    {
      "epoch": 0.7275,
      "grad_norm": 1.5636118650436401,
      "learning_rate": 5.450000000000001e-06,
      "loss": 2.7474,
      "step": 291
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.8544251918792725,
      "learning_rate": 5.400000000000001e-06,
      "loss": 2.5836,
      "step": 292
    },
    {
      "epoch": 0.7325,
      "grad_norm": 1.4845181703567505,
      "learning_rate": 5.3500000000000004e-06,
      "loss": 2.5001,
      "step": 293
    },
    {
      "epoch": 0.735,
      "grad_norm": 0.9682713747024536,
      "learning_rate": 5.300000000000001e-06,
      "loss": 2.3704,
      "step": 294
    },
    {
      "epoch": 0.7375,
      "grad_norm": 1.139123558998108,
      "learning_rate": 5.2500000000000006e-06,
      "loss": 2.498,
      "step": 295
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.9582972526550293,
      "learning_rate": 5.2e-06,
      "loss": 2.8401,
      "step": 296
    },
    {
      "epoch": 0.7425,
      "grad_norm": 1.2214773893356323,
      "learning_rate": 5.150000000000001e-06,
      "loss": 2.4964,
      "step": 297
    },
    {
      "epoch": 0.745,
      "grad_norm": 1.5916988849639893,
      "learning_rate": 5.1e-06,
      "loss": 2.789,
      "step": 298
    },
    {
      "epoch": 0.7475,
      "grad_norm": 1.3175058364868164,
      "learning_rate": 5.050000000000001e-06,
      "loss": 2.2842,
      "step": 299
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.5208394527435303,
      "learning_rate": 5e-06,
      "loss": 2.6268,
      "step": 300
    },
    {
      "epoch": 0.7525,
      "grad_norm": 1.6970012187957764,
      "learning_rate": 4.95e-06,
      "loss": 3.0989,
      "step": 301
    },
    {
      "epoch": 0.755,
      "grad_norm": 1.0863839387893677,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 2.5363,
      "step": 302
    },
    {
      "epoch": 0.7575,
      "grad_norm": 1.4228333234786987,
      "learning_rate": 4.85e-06,
      "loss": 2.2384,
      "step": 303
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.1009355783462524,
      "learning_rate": 4.800000000000001e-06,
      "loss": 2.5118,
      "step": 304
    },
    {
      "epoch": 0.7625,
      "grad_norm": 1.626090168952942,
      "learning_rate": 4.75e-06,
      "loss": 3.0346,
      "step": 305
    },
    {
      "epoch": 0.765,
      "grad_norm": 1.7524985074996948,
      "learning_rate": 4.7e-06,
      "loss": 2.866,
      "step": 306
    },
    {
      "epoch": 0.7675,
      "grad_norm": 0.9619413614273071,
      "learning_rate": 4.65e-06,
      "loss": 2.7082,
      "step": 307
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.0346368551254272,
      "learning_rate": 4.600000000000001e-06,
      "loss": 2.5831,
      "step": 308
    },
    {
      "epoch": 0.7725,
      "grad_norm": 1.3571815490722656,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 2.8175,
      "step": 309
    },
    {
      "epoch": 0.775,
      "grad_norm": 0.9211217761039734,
      "learning_rate": 4.5e-06,
      "loss": 2.2162,
      "step": 310
    },
    {
      "epoch": 0.7775,
      "grad_norm": 0.9217227101325989,
      "learning_rate": 4.450000000000001e-06,
      "loss": 2.5447,
      "step": 311
    },
    {
      "epoch": 0.78,
      "grad_norm": 1.4442232847213745,
      "learning_rate": 4.4e-06,
      "loss": 2.5516,
      "step": 312
    },
    {
      "epoch": 0.7825,
      "grad_norm": 2.173661470413208,
      "learning_rate": 4.350000000000001e-06,
      "loss": 2.8237,
      "step": 313
    },
    {
      "epoch": 0.785,
      "grad_norm": 1.0328022241592407,
      "learning_rate": 4.3e-06,
      "loss": 2.5789,
      "step": 314
    },
    {
      "epoch": 0.7875,
      "grad_norm": 1.6845656633377075,
      "learning_rate": 4.25e-06,
      "loss": 2.7861,
      "step": 315
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.003690242767334,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 2.8247,
      "step": 316
    },
    {
      "epoch": 0.7925,
      "grad_norm": 0.9073178172111511,
      "learning_rate": 4.15e-06,
      "loss": 2.0476,
      "step": 317
    },
    {
      "epoch": 0.795,
      "grad_norm": 1.2052966356277466,
      "learning_rate": 4.1e-06,
      "loss": 2.577,
      "step": 318
    },
    {
      "epoch": 0.7975,
      "grad_norm": 1.3335316181182861,
      "learning_rate": 4.05e-06,
      "loss": 2.871,
      "step": 319
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.2898180484771729,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.7909,
      "step": 320
    },
    {
      "epoch": 0.8025,
      "grad_norm": 0.8960414528846741,
      "learning_rate": 3.95e-06,
      "loss": 2.4136,
      "step": 321
    },
    {
      "epoch": 0.805,
      "grad_norm": 1.0490525960922241,
      "learning_rate": 3.900000000000001e-06,
      "loss": 2.1799,
      "step": 322
    },
    {
      "epoch": 0.8075,
      "grad_norm": 1.658100962638855,
      "learning_rate": 3.85e-06,
      "loss": 2.7796,
      "step": 323
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.0821839570999146,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 2.7519,
      "step": 324
    },
    {
      "epoch": 0.8125,
      "grad_norm": 1.4786803722381592,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 2.7704,
      "step": 325
    },
    {
      "epoch": 0.815,
      "grad_norm": 1.3606433868408203,
      "learning_rate": 3.7e-06,
      "loss": 2.7433,
      "step": 326
    },
    {
      "epoch": 0.8175,
      "grad_norm": 1.033447265625,
      "learning_rate": 3.65e-06,
      "loss": 2.8468,
      "step": 327
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.0277504920959473,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 3.0979,
      "step": 328
    },
    {
      "epoch": 0.8225,
      "grad_norm": 1.764878749847412,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 2.6579,
      "step": 329
    },
    {
      "epoch": 0.825,
      "grad_norm": 1.2304050922393799,
      "learning_rate": 3.5e-06,
      "loss": 3.1515,
      "step": 330
    },
    {
      "epoch": 0.8275,
      "grad_norm": 1.0954989194869995,
      "learning_rate": 3.45e-06,
      "loss": 2.4466,
      "step": 331
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.0179721117019653,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 2.6192,
      "step": 332
    },
    {
      "epoch": 0.8325,
      "grad_norm": 2.0626888275146484,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 3.1367,
      "step": 333
    },
    {
      "epoch": 0.835,
      "grad_norm": 1.400158405303955,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 2.3225,
      "step": 334
    },
    {
      "epoch": 0.8375,
      "grad_norm": 1.2611231803894043,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 2.6106,
      "step": 335
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.3621522188186646,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 2.3755,
      "step": 336
    },
    {
      "epoch": 0.8425,
      "grad_norm": 1.694920539855957,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 3.0504,
      "step": 337
    },
    {
      "epoch": 0.845,
      "grad_norm": 1.3278639316558838,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 2.7587,
      "step": 338
    },
    {
      "epoch": 0.8475,
      "grad_norm": 1.1720857620239258,
      "learning_rate": 3.05e-06,
      "loss": 2.4512,
      "step": 339
    },
    {
      "epoch": 0.85,
      "grad_norm": 1.203119158744812,
      "learning_rate": 3e-06,
      "loss": 2.5373,
      "step": 340
    },
    {
      "epoch": 0.8525,
      "grad_norm": 1.8672972917556763,
      "learning_rate": 2.95e-06,
      "loss": 2.8789,
      "step": 341
    },
    {
      "epoch": 0.855,
      "grad_norm": 1.5473331212997437,
      "learning_rate": 2.9e-06,
      "loss": 2.511,
      "step": 342
    },
    {
      "epoch": 0.8575,
      "grad_norm": 1.230299711227417,
      "learning_rate": 2.85e-06,
      "loss": 2.4508,
      "step": 343
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.5387115478515625,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 3.2151,
      "step": 344
    },
    {
      "epoch": 0.8625,
      "grad_norm": 0.8814523816108704,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 2.5838,
      "step": 345
    },
    {
      "epoch": 0.865,
      "grad_norm": 1.4973145723342896,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 2.529,
      "step": 346
    },
    {
      "epoch": 0.8675,
      "grad_norm": 0.9681499004364014,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 2.5989,
      "step": 347
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.991982638835907,
      "learning_rate": 2.6e-06,
      "loss": 2.7449,
      "step": 348
    },
    {
      "epoch": 0.8725,
      "grad_norm": 1.0481587648391724,
      "learning_rate": 2.55e-06,
      "loss": 2.4512,
      "step": 349
    },
    {
      "epoch": 0.875,
      "grad_norm": 1.8113739490509033,
      "learning_rate": 2.5e-06,
      "loss": 2.6997,
      "step": 350
    },
    {
      "epoch": 0.8775,
      "grad_norm": 1.1605638265609741,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 2.4487,
      "step": 351
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.8704707622528076,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 2.5544,
      "step": 352
    },
    {
      "epoch": 0.8825,
      "grad_norm": 1.5997426509857178,
      "learning_rate": 2.35e-06,
      "loss": 2.079,
      "step": 353
    },
    {
      "epoch": 0.885,
      "grad_norm": 0.9046956300735474,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 2.6991,
      "step": 354
    },
    {
      "epoch": 0.8875,
      "grad_norm": 0.8987280130386353,
      "learning_rate": 2.25e-06,
      "loss": 2.3281,
      "step": 355
    },
    {
      "epoch": 0.89,
      "grad_norm": 2.1055095195770264,
      "learning_rate": 2.2e-06,
      "loss": 2.942,
      "step": 356
    },
    {
      "epoch": 0.8925,
      "grad_norm": 1.0179944038391113,
      "learning_rate": 2.15e-06,
      "loss": 2.6226,
      "step": 357
    },
    {
      "epoch": 0.895,
      "grad_norm": 0.8908201456069946,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 2.5437,
      "step": 358
    },
    {
      "epoch": 0.8975,
      "grad_norm": 0.9919204711914062,
      "learning_rate": 2.05e-06,
      "loss": 2.6167,
      "step": 359
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.3533128499984741,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.7656,
      "step": 360
    },
    {
      "epoch": 0.9025,
      "grad_norm": 1.5116089582443237,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 2.7985,
      "step": 361
    },
    {
      "epoch": 0.905,
      "grad_norm": 1.1233296394348145,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 2.4902,
      "step": 362
    },
    {
      "epoch": 0.9075,
      "grad_norm": 1.3624063730239868,
      "learning_rate": 1.85e-06,
      "loss": 2.4763,
      "step": 363
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.012367606163025,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 2.6233,
      "step": 364
    },
    {
      "epoch": 0.9125,
      "grad_norm": 1.2850728034973145,
      "learning_rate": 1.75e-06,
      "loss": 2.6726,
      "step": 365
    },
    {
      "epoch": 0.915,
      "grad_norm": 1.139714002609253,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 2.4277,
      "step": 366
    },
    {
      "epoch": 0.9175,
      "grad_norm": 1.702054500579834,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 2.784,
      "step": 367
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.5244337320327759,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 2.4983,
      "step": 368
    },
    {
      "epoch": 0.9225,
      "grad_norm": 0.990729808807373,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 2.6408,
      "step": 369
    },
    {
      "epoch": 0.925,
      "grad_norm": 1.6035521030426025,
      "learning_rate": 1.5e-06,
      "loss": 2.7656,
      "step": 370
    },
    {
      "epoch": 0.9275,
      "grad_norm": 1.727439045906067,
      "learning_rate": 1.45e-06,
      "loss": 2.2137,
      "step": 371
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.45046865940094,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 2.4762,
      "step": 372
    },
    {
      "epoch": 0.9325,
      "grad_norm": 1.8131942749023438,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 2.9507,
      "step": 373
    },
    {
      "epoch": 0.935,
      "grad_norm": 1.0772196054458618,
      "learning_rate": 1.3e-06,
      "loss": 2.2929,
      "step": 374
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.9250187277793884,
      "learning_rate": 1.25e-06,
      "loss": 2.5877,
      "step": 375
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.2243655920028687,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 2.4348,
      "step": 376
    },
    {
      "epoch": 0.9425,
      "grad_norm": 1.745672345161438,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 2.9214,
      "step": 377
    },
    {
      "epoch": 0.945,
      "grad_norm": 1.6701159477233887,
      "learning_rate": 1.1e-06,
      "loss": 2.6747,
      "step": 378
    },
    {
      "epoch": 0.9475,
      "grad_norm": 1.5345369577407837,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 3.255,
      "step": 379
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.5753200054168701,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 2.3063,
      "step": 380
    },
    {
      "epoch": 0.9525,
      "grad_norm": 0.9224236011505127,
      "learning_rate": 9.500000000000001e-07,
      "loss": 2.8304,
      "step": 381
    },
    {
      "epoch": 0.955,
      "grad_norm": 2.8468313217163086,
      "learning_rate": 9.000000000000001e-07,
      "loss": 2.9737,
      "step": 382
    },
    {
      "epoch": 0.9575,
      "grad_norm": 1.5169612169265747,
      "learning_rate": 8.500000000000001e-07,
      "loss": 2.5457,
      "step": 383
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.4313298463821411,
      "learning_rate": 8.000000000000001e-07,
      "loss": 2.7448,
      "step": 384
    },
    {
      "epoch": 0.9625,
      "grad_norm": 0.887591540813446,
      "learning_rate": 7.5e-07,
      "loss": 2.7407,
      "step": 385
    },
    {
      "epoch": 0.965,
      "grad_norm": 1.2013964653015137,
      "learning_rate": 7.000000000000001e-07,
      "loss": 2.8123,
      "step": 386
    },
    {
      "epoch": 0.9675,
      "grad_norm": 1.089685082435608,
      "learning_rate": 6.5e-07,
      "loss": 2.735,
      "step": 387
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.808695673942566,
      "learning_rate": 6.000000000000001e-07,
      "loss": 3.1463,
      "step": 388
    },
    {
      "epoch": 0.9725,
      "grad_norm": 0.9855251908302307,
      "learning_rate": 5.5e-07,
      "loss": 2.662,
      "step": 389
    },
    {
      "epoch": 0.975,
      "grad_norm": 0.8635110259056091,
      "learning_rate": 5.000000000000001e-07,
      "loss": 2.6258,
      "step": 390
    },
    {
      "epoch": 0.9775,
      "grad_norm": 0.9996857047080994,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 2.4871,
      "step": 391
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.132926344871521,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.3769,
      "step": 392
    },
    {
      "epoch": 0.9825,
      "grad_norm": 1.0772877931594849,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 2.7185,
      "step": 393
    },
    {
      "epoch": 0.985,
      "grad_norm": 0.8307743668556213,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 2.357,
      "step": 394
    },
    {
      "epoch": 0.9875,
      "grad_norm": 0.8545295000076294,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 2.4623,
      "step": 395
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.0141770839691162,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 2.6182,
      "step": 396
    },
    {
      "epoch": 0.9925,
      "grad_norm": 1.289717435836792,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 2.5772,
      "step": 397
    },
    {
      "epoch": 0.995,
      "grad_norm": 1.5395151376724243,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 2.6248,
      "step": 398
    },
    {
      "epoch": 0.9975,
      "grad_norm": 1.0455236434936523,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 2.5632,
      "step": 399
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.0527230501174927,
      "learning_rate": 0.0,
      "loss": 2.5029,
      "step": 400
    },
    {
      "epoch": 1.0,
      "eval_runtime": 66.9763,
      "eval_samples_per_second": 2.986,
      "eval_steps_per_second": 1.493,
      "step": 400
    }
  ],
  "logging_steps": 1,
  "max_steps": 400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|