{
  "best_metric": 0.2225116640329361,
  "best_model_checkpoint": "loras/RTE-lora-results/checkpoint-500",
  "epoch": 7.211538461538462,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 5e-06,
      "loss": 0.2524,
      "step": 10
    },
    {
      "epoch": 0.1,
      "learning_rate": 1e-05,
      "loss": 0.2422,
      "step": 20
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.5e-05,
      "loss": 0.2129,
      "step": 30
    },
    {
      "epoch": 0.19,
      "learning_rate": 2e-05,
      "loss": 0.2744,
      "step": 40
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.5e-05,
      "loss": 0.2803,
      "step": 50
    },
    {
      "epoch": 0.29,
      "learning_rate": 3e-05,
      "loss": 0.2949,
      "step": 60
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.5e-05,
      "loss": 0.2074,
      "step": 70
    },
    {
      "epoch": 0.38,
      "learning_rate": 4e-05,
      "loss": 0.2353,
      "step": 80
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.5e-05,
      "loss": 0.2032,
      "step": 90
    },
    {
      "epoch": 0.48,
      "learning_rate": 5e-05,
      "loss": 0.2756,
      "step": 100
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.974747474747475e-05,
      "loss": 0.2318,
      "step": 110
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.94949494949495e-05,
      "loss": 0.259,
      "step": 120
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.9242424242424245e-05,
      "loss": 0.2474,
      "step": 130
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.898989898989899e-05,
      "loss": 0.2098,
      "step": 140
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.8737373737373736e-05,
      "loss": 0.2425,
      "step": 150
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.848484848484849e-05,
      "loss": 0.1971,
      "step": 160
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.823232323232323e-05,
      "loss": 0.2563,
      "step": 170
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.797979797979798e-05,
      "loss": 0.2695,
      "step": 180
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.772727272727273e-05,
      "loss": 0.2235,
      "step": 190
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.7474747474747476e-05,
      "loss": 0.2412,
      "step": 200
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.722222222222222e-05,
      "loss": 0.2255,
      "step": 210
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.696969696969697e-05,
      "loss": 0.2465,
      "step": 220
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.671717171717172e-05,
      "loss": 0.1868,
      "step": 230
    },
    {
      "epoch": 1.15,
      "learning_rate": 4.6464646464646464e-05,
      "loss": 0.2158,
      "step": 240
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.621212121212121e-05,
      "loss": 0.2469,
      "step": 250
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.595959595959596e-05,
      "loss": 0.2274,
      "step": 260
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.5707070707070706e-05,
      "loss": 0.2137,
      "step": 270
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.545454545454546e-05,
      "loss": 0.1944,
      "step": 280
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.5202020202020204e-05,
      "loss": 0.2389,
      "step": 290
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.494949494949495e-05,
      "loss": 0.2667,
      "step": 300
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.46969696969697e-05,
      "loss": 0.2591,
      "step": 310
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.2623,
      "step": 320
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.41919191919192e-05,
      "loss": 0.2651,
      "step": 330
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.3939393939393944e-05,
      "loss": 0.2339,
      "step": 340
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.368686868686869e-05,
      "loss": 0.234,
      "step": 350
    },
    {
      "epoch": 1.73,
      "learning_rate": 4.343434343434344e-05,
      "loss": 0.2506,
      "step": 360
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.318181818181819e-05,
      "loss": 0.2703,
      "step": 370
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.292929292929293e-05,
      "loss": 0.2095,
      "step": 380
    },
    {
      "epoch": 1.88,
      "learning_rate": 4.267676767676768e-05,
      "loss": 0.2501,
      "step": 390
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.242424242424243e-05,
      "loss": 0.1951,
      "step": 400
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.2171717171717175e-05,
      "loss": 0.2671,
      "step": 410
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.191919191919192e-05,
      "loss": 0.2179,
      "step": 420
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.2111,
      "step": 430
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.141414141414142e-05,
      "loss": 0.2502,
      "step": 440
    },
    {
      "epoch": 2.16,
      "learning_rate": 4.116161616161616e-05,
      "loss": 0.1938,
      "step": 450
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.0909090909090915e-05,
      "loss": 0.2539,
      "step": 460
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.065656565656566e-05,
      "loss": 0.2325,
      "step": 470
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.0404040404040405e-05,
      "loss": 0.1858,
      "step": 480
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.015151515151515e-05,
      "loss": 0.2226,
      "step": 490
    },
    {
      "epoch": 2.4,
      "learning_rate": 3.98989898989899e-05,
      "loss": 0.2351,
      "step": 500
    },
    {
      "epoch": 2.4,
      "eval_MCC": 0.5743913241540748,
      "eval_accuracy": 0.7833935018050542,
      "eval_f1-score": 0.7794791361410579,
      "eval_loss": 0.2225116640329361,
      "eval_runtime": 6.5816,
      "eval_samples_per_second": 42.087,
      "eval_steps_per_second": 1.823,
      "step": 500
    },
    {
      "epoch": 2.45,
      "learning_rate": 3.964646464646465e-05,
      "loss": 0.2408,
      "step": 510
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.939393939393939e-05,
      "loss": 0.2224,
      "step": 520
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.9141414141414145e-05,
      "loss": 0.26,
      "step": 530
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.2238,
      "step": 540
    },
    {
      "epoch": 2.64,
      "learning_rate": 3.8636363636363636e-05,
      "loss": 0.2423,
      "step": 550
    },
    {
      "epoch": 2.69,
      "learning_rate": 3.838383838383838e-05,
      "loss": 0.2227,
      "step": 560
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.8131313131313133e-05,
      "loss": 0.2635,
      "step": 570
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.787878787878788e-05,
      "loss": 0.236,
      "step": 580
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.7626262626262624e-05,
      "loss": 0.2339,
      "step": 590
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.7373737373737376e-05,
      "loss": 0.2831,
      "step": 600
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.712121212121212e-05,
      "loss": 0.1818,
      "step": 610
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.686868686868687e-05,
      "loss": 0.2263,
      "step": 620
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.661616161616162e-05,
      "loss": 0.2271,
      "step": 630
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.6363636363636364e-05,
      "loss": 0.2386,
      "step": 640
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.611111111111111e-05,
      "loss": 0.2285,
      "step": 650
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.5858585858585855e-05,
      "loss": 0.1966,
      "step": 660
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.560606060606061e-05,
      "loss": 0.2553,
      "step": 670
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.535353535353535e-05,
      "loss": 0.2091,
      "step": 680
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.5101010101010104e-05,
      "loss": 0.2359,
      "step": 690
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.484848484848485e-05,
      "loss": 0.2194,
      "step": 700
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.45959595959596e-05,
      "loss": 0.212,
      "step": 710
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.434343434343435e-05,
      "loss": 0.2334,
      "step": 720
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.409090909090909e-05,
      "loss": 0.2359,
      "step": 730
    },
    {
      "epoch": 3.56,
      "learning_rate": 3.3838383838383844e-05,
      "loss": 0.1982,
      "step": 740
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.358585858585859e-05,
      "loss": 0.2275,
      "step": 750
    },
    {
      "epoch": 3.65,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.1935,
      "step": 760
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.308080808080809e-05,
      "loss": 0.2083,
      "step": 770
    },
    {
      "epoch": 3.75,
      "learning_rate": 3.282828282828283e-05,
      "loss": 0.2164,
      "step": 780
    },
    {
      "epoch": 3.8,
      "learning_rate": 3.257575757575758e-05,
      "loss": 0.2011,
      "step": 790
    },
    {
      "epoch": 3.85,
      "learning_rate": 3.232323232323233e-05,
      "loss": 0.1781,
      "step": 800
    },
    {
      "epoch": 3.89,
      "learning_rate": 3.2070707070707075e-05,
      "loss": 0.2711,
      "step": 810
    },
    {
      "epoch": 3.94,
      "learning_rate": 3.181818181818182e-05,
      "loss": 0.2304,
      "step": 820
    },
    {
      "epoch": 3.99,
      "learning_rate": 3.1565656565656566e-05,
      "loss": 0.2696,
      "step": 830
    },
    {
      "epoch": 4.04,
      "learning_rate": 3.131313131313132e-05,
      "loss": 0.1603,
      "step": 840
    },
    {
      "epoch": 4.09,
      "learning_rate": 3.106060606060606e-05,
      "loss": 0.2524,
      "step": 850
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.080808080808081e-05,
      "loss": 0.2174,
      "step": 860
    },
    {
      "epoch": 4.18,
      "learning_rate": 3.055555555555556e-05,
      "loss": 0.2224,
      "step": 870
    },
    {
      "epoch": 4.23,
      "learning_rate": 3.0303030303030306e-05,
      "loss": 0.2071,
      "step": 880
    },
    {
      "epoch": 4.28,
      "learning_rate": 3.005050505050505e-05,
      "loss": 0.2331,
      "step": 890
    },
    {
      "epoch": 4.33,
      "learning_rate": 2.9797979797979796e-05,
      "loss": 0.1696,
      "step": 900
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.954545454545455e-05,
      "loss": 0.2193,
      "step": 910
    },
    {
      "epoch": 4.42,
      "learning_rate": 2.9292929292929294e-05,
      "loss": 0.2337,
      "step": 920
    },
    {
      "epoch": 4.47,
      "learning_rate": 2.904040404040404e-05,
      "loss": 0.2351,
      "step": 930
    },
    {
      "epoch": 4.52,
      "learning_rate": 2.878787878787879e-05,
      "loss": 0.1655,
      "step": 940
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.8535353535353536e-05,
      "loss": 0.2151,
      "step": 950
    },
    {
      "epoch": 4.62,
      "learning_rate": 2.8282828282828282e-05,
      "loss": 0.2667,
      "step": 960
    },
    {
      "epoch": 4.66,
      "learning_rate": 2.803030303030303e-05,
      "loss": 0.2228,
      "step": 970
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.2036,
      "step": 980
    },
    {
      "epoch": 4.76,
      "learning_rate": 2.7525252525252528e-05,
      "loss": 0.2031,
      "step": 990
    },
    {
      "epoch": 4.81,
      "learning_rate": 2.7272727272727273e-05,
      "loss": 0.2193,
      "step": 1000
    },
    {
      "epoch": 4.81,
      "eval_MCC": 0.6229024526089935,
      "eval_accuracy": 0.8086642599277978,
      "eval_f1-score": 0.8061174971587979,
      "eval_loss": 0.21737736463546753,
      "eval_runtime": 4.873,
      "eval_samples_per_second": 56.844,
      "eval_steps_per_second": 2.463,
      "step": 1000
    },
    {
      "epoch": 4.86,
      "learning_rate": 2.7020202020202022e-05,
      "loss": 0.2319,
      "step": 1010
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.676767676767677e-05,
      "loss": 0.2138,
      "step": 1020
    },
    {
      "epoch": 4.95,
      "learning_rate": 2.6515151515151516e-05,
      "loss": 0.2005,
      "step": 1030
    },
    {
      "epoch": 5.0,
      "learning_rate": 2.6262626262626268e-05,
      "loss": 0.2562,
      "step": 1040
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.6010101010101013e-05,
      "loss": 0.1982,
      "step": 1050
    },
    {
      "epoch": 5.1,
      "learning_rate": 2.575757575757576e-05,
      "loss": 0.283,
      "step": 1060
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.5505050505050504e-05,
      "loss": 0.1957,
      "step": 1070
    },
    {
      "epoch": 5.19,
      "learning_rate": 2.5252525252525256e-05,
      "loss": 0.2135,
      "step": 1080
    },
    {
      "epoch": 5.24,
      "learning_rate": 2.5e-05,
      "loss": 0.2646,
      "step": 1090
    },
    {
      "epoch": 5.29,
      "learning_rate": 2.474747474747475e-05,
      "loss": 0.1809,
      "step": 1100
    },
    {
      "epoch": 5.34,
      "learning_rate": 2.4494949494949495e-05,
      "loss": 0.2026,
      "step": 1110
    },
    {
      "epoch": 5.38,
      "learning_rate": 2.4242424242424244e-05,
      "loss": 0.2504,
      "step": 1120
    },
    {
      "epoch": 5.43,
      "learning_rate": 2.398989898989899e-05,
      "loss": 0.2255,
      "step": 1130
    },
    {
      "epoch": 5.48,
      "learning_rate": 2.3737373737373738e-05,
      "loss": 0.2645,
      "step": 1140
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.3484848484848487e-05,
      "loss": 0.2531,
      "step": 1150
    },
    {
      "epoch": 5.58,
      "learning_rate": 2.3232323232323232e-05,
      "loss": 0.2036,
      "step": 1160
    },
    {
      "epoch": 5.62,
      "learning_rate": 2.297979797979798e-05,
      "loss": 0.2628,
      "step": 1170
    },
    {
      "epoch": 5.67,
      "learning_rate": 2.272727272727273e-05,
      "loss": 0.195,
      "step": 1180
    },
    {
      "epoch": 5.72,
      "learning_rate": 2.2474747474747475e-05,
      "loss": 0.2283,
      "step": 1190
    },
    {
      "epoch": 5.77,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.2091,
      "step": 1200
    },
    {
      "epoch": 5.82,
      "learning_rate": 2.1969696969696972e-05,
      "loss": 0.2065,
      "step": 1210
    },
    {
      "epoch": 5.87,
      "learning_rate": 2.171717171717172e-05,
      "loss": 0.1773,
      "step": 1220
    },
    {
      "epoch": 5.91,
      "learning_rate": 2.1464646464646466e-05,
      "loss": 0.2471,
      "step": 1230
    },
    {
      "epoch": 5.96,
      "learning_rate": 2.1212121212121215e-05,
      "loss": 0.215,
      "step": 1240
    },
    {
      "epoch": 6.01,
      "learning_rate": 2.095959595959596e-05,
      "loss": 0.1833,
      "step": 1250
    },
    {
      "epoch": 6.06,
      "learning_rate": 2.070707070707071e-05,
      "loss": 0.1981,
      "step": 1260
    },
    {
      "epoch": 6.11,
      "learning_rate": 2.0454545454545457e-05,
      "loss": 0.2149,
      "step": 1270
    },
    {
      "epoch": 6.15,
      "learning_rate": 2.0202020202020203e-05,
      "loss": 0.1759,
      "step": 1280
    },
    {
      "epoch": 6.2,
      "learning_rate": 1.994949494949495e-05,
      "loss": 0.2351,
      "step": 1290
    },
    {
      "epoch": 6.25,
      "learning_rate": 1.9696969696969697e-05,
      "loss": 0.2085,
      "step": 1300
    },
    {
      "epoch": 6.3,
      "learning_rate": 1.9444444444444445e-05,
      "loss": 0.2169,
      "step": 1310
    },
    {
      "epoch": 6.35,
      "learning_rate": 1.919191919191919e-05,
      "loss": 0.2034,
      "step": 1320
    },
    {
      "epoch": 6.39,
      "learning_rate": 1.893939393939394e-05,
      "loss": 0.2673,
      "step": 1330
    },
    {
      "epoch": 6.44,
      "learning_rate": 1.8686868686868688e-05,
      "loss": 0.1767,
      "step": 1340
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.8434343434343433e-05,
      "loss": 0.2386,
      "step": 1350
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.2157,
      "step": 1360
    },
    {
      "epoch": 6.59,
      "learning_rate": 1.7929292929292927e-05,
      "loss": 0.2151,
      "step": 1370
    },
    {
      "epoch": 6.63,
      "learning_rate": 1.7676767676767676e-05,
      "loss": 0.2447,
      "step": 1380
    },
    {
      "epoch": 6.68,
      "learning_rate": 1.7424242424242425e-05,
      "loss": 0.2385,
      "step": 1390
    },
    {
      "epoch": 6.73,
      "learning_rate": 1.7171717171717173e-05,
      "loss": 0.1787,
      "step": 1400
    },
    {
      "epoch": 6.78,
      "learning_rate": 1.6919191919191922e-05,
      "loss": 0.1873,
      "step": 1410
    },
    {
      "epoch": 6.83,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.2398,
      "step": 1420
    },
    {
      "epoch": 6.88,
      "learning_rate": 1.6414141414141416e-05,
      "loss": 0.1952,
      "step": 1430
    },
    {
      "epoch": 6.92,
      "learning_rate": 1.6161616161616165e-05,
      "loss": 0.2513,
      "step": 1440
    },
    {
      "epoch": 6.97,
      "learning_rate": 1.590909090909091e-05,
      "loss": 0.1848,
      "step": 1450
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.565656565656566e-05,
      "loss": 0.2341,
      "step": 1460
    },
    {
      "epoch": 7.07,
      "learning_rate": 1.5404040404040404e-05,
      "loss": 0.1803,
      "step": 1470
    },
    {
      "epoch": 7.12,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 0.2258,
      "step": 1480
    },
    {
      "epoch": 7.16,
      "learning_rate": 1.4898989898989898e-05,
      "loss": 0.2159,
      "step": 1490
    },
    {
      "epoch": 7.21,
      "learning_rate": 1.4646464646464647e-05,
      "loss": 0.2416,
      "step": 1500
    },
    {
      "epoch": 7.21,
      "eval_MCC": 0.6271860965381456,
      "eval_accuracy": 0.8122743682310469,
      "eval_f1-score": 0.8105339578920151,
      "eval_loss": 0.21340395510196686,
      "eval_runtime": 4.8244,
      "eval_samples_per_second": 57.416,
      "eval_steps_per_second": 2.487,
      "step": 1500
    }
  ],
  "logging_steps": 10,
  "max_steps": 2080,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 6172840372469760.0,
  "train_batch_size": 12,
  "trial_name": null,
  "trial_params": null
}