{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9969356486210419,
  "eval_steps": 82,
  "global_step": 244,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0040858018386108275,
      "grad_norm": 4.75867223739624,
      "learning_rate": 6.666666666666667e-07,
      "loss": 1.3989,
      "step": 1
    },
    {
      "epoch": 0.0040858018386108275,
      "eval_loss": 1.7111468315124512,
      "eval_runtime": 5.4436,
      "eval_samples_per_second": 14.512,
      "eval_steps_per_second": 1.837,
      "step": 1
    },
    {
      "epoch": 0.008171603677221655,
      "grad_norm": 4.975377559661865,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 1.4837,
      "step": 2
    },
    {
      "epoch": 0.012257405515832482,
      "grad_norm": 5.219729900360107,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.5181,
      "step": 3
    },
    {
      "epoch": 0.01634320735444331,
      "grad_norm": 4.57335901260376,
      "learning_rate": 2.666666666666667e-06,
      "loss": 1.4106,
      "step": 4
    },
    {
      "epoch": 0.020429009193054137,
      "grad_norm": 3.840559720993042,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.3763,
      "step": 5
    },
    {
      "epoch": 0.024514811031664963,
      "grad_norm": 3.2056212425231934,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.1876,
      "step": 6
    },
    {
      "epoch": 0.028600612870275793,
      "grad_norm": 2.6987595558166504,
      "learning_rate": 4.666666666666667e-06,
      "loss": 1.2154,
      "step": 7
    },
    {
      "epoch": 0.03268641470888662,
      "grad_norm": 2.378502130508423,
      "learning_rate": 5.333333333333334e-06,
      "loss": 1.1594,
      "step": 8
    },
    {
      "epoch": 0.03677221654749745,
      "grad_norm": 1.7688865661621094,
      "learning_rate": 6e-06,
      "loss": 0.8435,
      "step": 9
    },
    {
      "epoch": 0.04085801838610827,
      "grad_norm": 1.3263744115829468,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.7219,
      "step": 10
    },
    {
      "epoch": 0.0449438202247191,
      "grad_norm": 1.3509997129440308,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.8172,
      "step": 11
    },
    {
      "epoch": 0.049029622063329927,
      "grad_norm": 1.4541417360305786,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.7393,
      "step": 12
    },
    {
      "epoch": 0.05311542390194075,
      "grad_norm": 1.181699275970459,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.664,
      "step": 13
    },
    {
      "epoch": 0.05720122574055159,
      "grad_norm": 0.9503294825553894,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.6222,
      "step": 14
    },
    {
      "epoch": 0.06128702757916241,
      "grad_norm": 0.7614471316337585,
      "learning_rate": 1e-05,
      "loss": 0.56,
      "step": 15
    },
    {
      "epoch": 0.06537282941777324,
      "grad_norm": 0.9878801107406616,
      "learning_rate": 1.0666666666666667e-05,
      "loss": 0.5548,
      "step": 16
    },
    {
      "epoch": 0.06945863125638406,
      "grad_norm": 0.8131901025772095,
      "learning_rate": 1.1333333333333334e-05,
      "loss": 0.4878,
      "step": 17
    },
    {
      "epoch": 0.0735444330949949,
      "grad_norm": 0.7322743535041809,
      "learning_rate": 1.2e-05,
      "loss": 0.5159,
      "step": 18
    },
    {
      "epoch": 0.07763023493360573,
      "grad_norm": 0.6428759098052979,
      "learning_rate": 1.2666666666666667e-05,
      "loss": 0.4575,
      "step": 19
    },
    {
      "epoch": 0.08171603677221655,
      "grad_norm": 0.562318742275238,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.4571,
      "step": 20
    },
    {
      "epoch": 0.08580183861082738,
      "grad_norm": 0.5707699060440063,
      "learning_rate": 1.4e-05,
      "loss": 0.4592,
      "step": 21
    },
    {
      "epoch": 0.0898876404494382,
      "grad_norm": 0.5272228717803955,
      "learning_rate": 1.4666666666666666e-05,
      "loss": 0.4457,
      "step": 22
    },
    {
      "epoch": 0.09397344228804903,
      "grad_norm": 0.5120903253555298,
      "learning_rate": 1.5333333333333334e-05,
      "loss": 0.4034,
      "step": 23
    },
    {
      "epoch": 0.09805924412665985,
      "grad_norm": 0.46359285712242126,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.4037,
      "step": 24
    },
    {
      "epoch": 0.10214504596527069,
      "grad_norm": 0.49431198835372925,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.3875,
      "step": 25
    },
    {
      "epoch": 0.1062308478038815,
      "grad_norm": 0.4450273811817169,
      "learning_rate": 1.7333333333333336e-05,
      "loss": 0.3797,
      "step": 26
    },
    {
      "epoch": 0.11031664964249234,
      "grad_norm": 0.4551868140697479,
      "learning_rate": 1.8e-05,
      "loss": 0.3512,
      "step": 27
    },
    {
      "epoch": 0.11440245148110317,
      "grad_norm": 0.5083736777305603,
      "learning_rate": 1.866666666666667e-05,
      "loss": 0.3906,
      "step": 28
    },
    {
      "epoch": 0.118488253319714,
      "grad_norm": 0.47295963764190674,
      "learning_rate": 1.9333333333333333e-05,
      "loss": 0.3554,
      "step": 29
    },
    {
      "epoch": 0.12257405515832483,
      "grad_norm": 0.4848616123199463,
      "learning_rate": 2e-05,
      "loss": 0.3712,
      "step": 30
    },
    {
      "epoch": 0.12665985699693566,
      "grad_norm": 0.4398118555545807,
      "learning_rate": 1.999989986294826e-05,
      "loss": 0.3694,
      "step": 31
    },
    {
      "epoch": 0.13074565883554648,
      "grad_norm": 0.41183602809906006,
      "learning_rate": 1.9999599453798523e-05,
      "loss": 0.3336,
      "step": 32
    },
    {
      "epoch": 0.1348314606741573,
      "grad_norm": 0.492713987827301,
      "learning_rate": 1.999909877856721e-05,
      "loss": 0.3657,
      "step": 33
    },
    {
      "epoch": 0.13891726251276812,
      "grad_norm": 0.4517015516757965,
      "learning_rate": 1.9998397847281548e-05,
      "loss": 0.367,
      "step": 34
    },
    {
      "epoch": 0.14300306435137897,
      "grad_norm": 0.4641965627670288,
      "learning_rate": 1.9997496673979375e-05,
      "loss": 0.3565,
      "step": 35
    },
    {
      "epoch": 0.1470888661899898,
      "grad_norm": 0.4812065064907074,
      "learning_rate": 1.9996395276708856e-05,
      "loss": 0.3773,
      "step": 36
    },
    {
      "epoch": 0.1511746680286006,
      "grad_norm": 0.42300987243652344,
      "learning_rate": 1.999509367752813e-05,
      "loss": 0.3643,
      "step": 37
    },
    {
      "epoch": 0.15526046986721145,
      "grad_norm": 0.4512963593006134,
      "learning_rate": 1.9993591902504854e-05,
      "loss": 0.3409,
      "step": 38
    },
    {
      "epoch": 0.15934627170582227,
      "grad_norm": 0.41626426577568054,
      "learning_rate": 1.9991889981715696e-05,
      "loss": 0.3546,
      "step": 39
    },
    {
      "epoch": 0.1634320735444331,
      "grad_norm": 0.43549367785453796,
      "learning_rate": 1.9989987949245725e-05,
      "loss": 0.3091,
      "step": 40
    },
    {
      "epoch": 0.1675178753830439,
      "grad_norm": 0.4042600393295288,
      "learning_rate": 1.9987885843187717e-05,
      "loss": 0.3174,
      "step": 41
    },
    {
      "epoch": 0.17160367722165476,
      "grad_norm": 0.4394363462924957,
      "learning_rate": 1.9985583705641418e-05,
      "loss": 0.3601,
      "step": 42
    },
    {
      "epoch": 0.17568947906026558,
      "grad_norm": 0.4294170141220093,
      "learning_rate": 1.9983081582712684e-05,
      "loss": 0.3283,
      "step": 43
    },
    {
      "epoch": 0.1797752808988764,
      "grad_norm": 0.44452300667762756,
      "learning_rate": 1.998037952451255e-05,
      "loss": 0.3367,
      "step": 44
    },
    {
      "epoch": 0.18386108273748722,
      "grad_norm": 0.4113090932369232,
      "learning_rate": 1.9977477585156252e-05,
      "loss": 0.2986,
      "step": 45
    },
    {
      "epoch": 0.18794688457609807,
      "grad_norm": 0.44443050026893616,
      "learning_rate": 1.9974375822762117e-05,
      "loss": 0.3463,
      "step": 46
    },
    {
      "epoch": 0.1920326864147089,
      "grad_norm": 0.4303809106349945,
      "learning_rate": 1.9971074299450414e-05,
      "loss": 0.3281,
      "step": 47
    },
    {
      "epoch": 0.1961184882533197,
      "grad_norm": 0.4178621470928192,
      "learning_rate": 1.9967573081342103e-05,
      "loss": 0.3629,
      "step": 48
    },
    {
      "epoch": 0.20020429009193055,
      "grad_norm": 0.38657113909721375,
      "learning_rate": 1.9963872238557516e-05,
      "loss": 0.3225,
      "step": 49
    },
    {
      "epoch": 0.20429009193054137,
      "grad_norm": 0.5300270915031433,
      "learning_rate": 1.9959971845214953e-05,
      "loss": 0.3279,
      "step": 50
    },
    {
      "epoch": 0.2083758937691522,
      "grad_norm": 0.4061177968978882,
      "learning_rate": 1.9955871979429188e-05,
      "loss": 0.3278,
      "step": 51
    },
    {
      "epoch": 0.212461695607763,
      "grad_norm": 0.41504785418510437,
      "learning_rate": 1.9951572723309918e-05,
      "loss": 0.3096,
      "step": 52
    },
    {
      "epoch": 0.21654749744637386,
      "grad_norm": 0.4208971858024597,
      "learning_rate": 1.9947074162960113e-05,
      "loss": 0.3187,
      "step": 53
    },
    {
      "epoch": 0.22063329928498468,
      "grad_norm": 0.36819201707839966,
      "learning_rate": 1.9942376388474282e-05,
      "loss": 0.3167,
      "step": 54
    },
    {
      "epoch": 0.2247191011235955,
      "grad_norm": 0.43327596783638,
      "learning_rate": 1.993747949393668e-05,
      "loss": 0.3188,
      "step": 55
    },
    {
      "epoch": 0.22880490296220635,
      "grad_norm": 0.4377865791320801,
      "learning_rate": 1.9932383577419432e-05,
      "loss": 0.3478,
      "step": 56
    },
    {
      "epoch": 0.23289070480081717,
      "grad_norm": 0.43336397409439087,
      "learning_rate": 1.992708874098054e-05,
      "loss": 0.3025,
      "step": 57
    },
    {
      "epoch": 0.236976506639428,
      "grad_norm": 0.4399135410785675,
      "learning_rate": 1.9921595090661872e-05,
      "loss": 0.3098,
      "step": 58
    },
    {
      "epoch": 0.2410623084780388,
      "grad_norm": 0.4253901243209839,
      "learning_rate": 1.991590273648702e-05,
      "loss": 0.3303,
      "step": 59
    },
    {
      "epoch": 0.24514811031664965,
      "grad_norm": 0.39254307746887207,
      "learning_rate": 1.9910011792459086e-05,
      "loss": 0.3018,
      "step": 60
    },
    {
      "epoch": 0.24923391215526047,
      "grad_norm": 0.4217659831047058,
      "learning_rate": 1.9903922376558432e-05,
      "loss": 0.285,
      "step": 61
    },
    {
      "epoch": 0.2533197139938713,
      "grad_norm": 0.48558109998703003,
      "learning_rate": 1.989763461074029e-05,
      "loss": 0.3221,
      "step": 62
    },
    {
      "epoch": 0.2574055158324821,
      "grad_norm": 0.47454214096069336,
      "learning_rate": 1.989114862093232e-05,
      "loss": 0.3056,
      "step": 63
    },
    {
      "epoch": 0.26149131767109296,
      "grad_norm": 0.4013993442058563,
      "learning_rate": 1.9884464537032103e-05,
      "loss": 0.3376,
      "step": 64
    },
    {
      "epoch": 0.26557711950970375,
      "grad_norm": 0.4264606237411499,
      "learning_rate": 1.9877582492904533e-05,
      "loss": 0.3158,
      "step": 65
    },
    {
      "epoch": 0.2696629213483146,
      "grad_norm": 0.5440453886985779,
      "learning_rate": 1.9870502626379127e-05,
      "loss": 0.3056,
      "step": 66
    },
    {
      "epoch": 0.27374872318692545,
      "grad_norm": 0.40003377199172974,
      "learning_rate": 1.9863225079247286e-05,
      "loss": 0.3357,
      "step": 67
    },
    {
      "epoch": 0.27783452502553624,
      "grad_norm": 0.39155763387680054,
      "learning_rate": 1.985574999725943e-05,
      "loss": 0.2819,
      "step": 68
    },
    {
      "epoch": 0.2819203268641471,
      "grad_norm": 0.4461009204387665,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.2732,
      "step": 69
    },
    {
      "epoch": 0.28600612870275793,
      "grad_norm": 0.38970062136650085,
      "learning_rate": 1.9840207831494903e-05,
      "loss": 0.2957,
      "step": 70
    },
    {
      "epoch": 0.2900919305413687,
      "grad_norm": 0.4369664788246155,
      "learning_rate": 1.983214105898757e-05,
      "loss": 0.3158,
      "step": 71
    },
    {
      "epoch": 0.2941777323799796,
      "grad_norm": 0.4734659492969513,
      "learning_rate": 1.9823877374156647e-05,
      "loss": 0.3054,
      "step": 72
    },
    {
      "epoch": 0.2982635342185904,
      "grad_norm": 0.3933468461036682,
      "learning_rate": 1.9815416942502346e-05,
      "loss": 0.286,
      "step": 73
    },
    {
      "epoch": 0.3023493360572012,
      "grad_norm": 0.4472273290157318,
      "learning_rate": 1.98067599334652e-05,
      "loss": 0.3149,
      "step": 74
    },
    {
      "epoch": 0.30643513789581206,
      "grad_norm": 0.43143752217292786,
      "learning_rate": 1.979790652042268e-05,
      "loss": 0.2792,
      "step": 75
    },
    {
      "epoch": 0.3105209397344229,
      "grad_norm": 0.4325246512889862,
      "learning_rate": 1.978885688068572e-05,
      "loss": 0.3024,
      "step": 76
    },
    {
      "epoch": 0.3146067415730337,
      "grad_norm": 0.48796600103378296,
      "learning_rate": 1.9779611195495177e-05,
      "loss": 0.3343,
      "step": 77
    },
    {
      "epoch": 0.31869254341164455,
      "grad_norm": 0.40505748987197876,
      "learning_rate": 1.977016965001817e-05,
      "loss": 0.2753,
      "step": 78
    },
    {
      "epoch": 0.32277834525025534,
      "grad_norm": 0.40753036737442017,
      "learning_rate": 1.976053243334442e-05,
      "loss": 0.3073,
      "step": 79
    },
    {
      "epoch": 0.3268641470888662,
      "grad_norm": 0.4000149071216583,
      "learning_rate": 1.9750699738482403e-05,
      "loss": 0.284,
      "step": 80
    },
    {
      "epoch": 0.33094994892747703,
      "grad_norm": 0.42099907994270325,
      "learning_rate": 1.9740671762355548e-05,
      "loss": 0.2881,
      "step": 81
    },
    {
      "epoch": 0.3350357507660878,
      "grad_norm": 0.4155902564525604,
      "learning_rate": 1.973044870579824e-05,
      "loss": 0.2969,
      "step": 82
    },
    {
      "epoch": 0.3350357507660878,
      "eval_loss": 0.31923907995224,
      "eval_runtime": 5.81,
      "eval_samples_per_second": 13.597,
      "eval_steps_per_second": 1.721,
      "step": 82
    },
    {
      "epoch": 0.3391215526046987,
      "grad_norm": 0.39282551407814026,
      "learning_rate": 1.972003077355183e-05,
      "loss": 0.2948,
      "step": 83
    },
    {
      "epoch": 0.3432073544433095,
      "grad_norm": 0.4381943643093109,
      "learning_rate": 1.9709418174260523e-05,
      "loss": 0.3454,
      "step": 84
    },
    {
      "epoch": 0.3472931562819203,
      "grad_norm": 0.4093382954597473,
      "learning_rate": 1.9698611120467196e-05,
      "loss": 0.2962,
      "step": 85
    },
    {
      "epoch": 0.35137895812053116,
      "grad_norm": 0.450135737657547,
      "learning_rate": 1.9687609828609156e-05,
      "loss": 0.3243,
      "step": 86
    },
    {
      "epoch": 0.355464759959142,
      "grad_norm": 0.4139018654823303,
      "learning_rate": 1.9676414519013782e-05,
      "loss": 0.2996,
      "step": 87
    },
    {
      "epoch": 0.3595505617977528,
      "grad_norm": 0.40026575326919556,
      "learning_rate": 1.966502541589414e-05,
      "loss": 0.2788,
      "step": 88
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.36627820134162903,
      "learning_rate": 1.965344274734447e-05,
      "loss": 0.2857,
      "step": 89
    },
    {
      "epoch": 0.36772216547497444,
      "grad_norm": 0.42685478925704956,
      "learning_rate": 1.9641666745335626e-05,
      "loss": 0.2995,
      "step": 90
    },
    {
      "epoch": 0.3718079673135853,
      "grad_norm": 0.374288946390152,
      "learning_rate": 1.9629697645710432e-05,
      "loss": 0.3056,
      "step": 91
    },
    {
      "epoch": 0.37589376915219613,
      "grad_norm": 0.3649786114692688,
      "learning_rate": 1.961753568817896e-05,
      "loss": 0.2854,
      "step": 92
    },
    {
      "epoch": 0.3799795709908069,
      "grad_norm": 0.38573023676872253,
      "learning_rate": 1.9605181116313725e-05,
      "loss": 0.2667,
      "step": 93
    },
    {
      "epoch": 0.3840653728294178,
      "grad_norm": 0.37577807903289795,
      "learning_rate": 1.9592634177544803e-05,
      "loss": 0.2815,
      "step": 94
    },
    {
      "epoch": 0.3881511746680286,
      "grad_norm": 0.4320047199726105,
      "learning_rate": 1.957989512315489e-05,
      "loss": 0.3094,
      "step": 95
    },
    {
      "epoch": 0.3922369765066394,
      "grad_norm": 0.3816889524459839,
      "learning_rate": 1.9566964208274254e-05,
      "loss": 0.292,
      "step": 96
    },
    {
      "epoch": 0.39632277834525026,
      "grad_norm": 0.3946669399738312,
      "learning_rate": 1.9553841691875632e-05,
      "loss": 0.3002,
      "step": 97
    },
    {
      "epoch": 0.4004085801838611,
      "grad_norm": 0.36885613203048706,
      "learning_rate": 1.9540527836769047e-05,
      "loss": 0.2583,
      "step": 98
    },
    {
      "epoch": 0.4044943820224719,
      "grad_norm": 0.37865176796913147,
      "learning_rate": 1.9527022909596537e-05,
      "loss": 0.2787,
      "step": 99
    },
    {
      "epoch": 0.40858018386108275,
      "grad_norm": 0.4429585337638855,
      "learning_rate": 1.951332718082682e-05,
      "loss": 0.3226,
      "step": 100
    },
    {
      "epoch": 0.41266598569969354,
      "grad_norm": 0.3926009237766266,
      "learning_rate": 1.9499440924749878e-05,
      "loss": 0.2914,
      "step": 101
    },
    {
      "epoch": 0.4167517875383044,
      "grad_norm": 0.3467339277267456,
      "learning_rate": 1.9485364419471454e-05,
      "loss": 0.266,
      "step": 102
    },
    {
      "epoch": 0.42083758937691523,
      "grad_norm": 0.4126642644405365,
      "learning_rate": 1.9471097946907506e-05,
      "loss": 0.2775,
      "step": 103
    },
    {
      "epoch": 0.424923391215526,
      "grad_norm": 0.44586020708084106,
      "learning_rate": 1.9456641792778527e-05,
      "loss": 0.2884,
      "step": 104
    },
    {
      "epoch": 0.4290091930541369,
      "grad_norm": 0.3969588279724121,
      "learning_rate": 1.9441996246603848e-05,
      "loss": 0.2835,
      "step": 105
    },
    {
      "epoch": 0.4330949948927477,
      "grad_norm": 0.38928356766700745,
      "learning_rate": 1.9427161601695833e-05,
      "loss": 0.2826,
      "step": 106
    },
    {
      "epoch": 0.4371807967313585,
      "grad_norm": 0.4089799225330353,
      "learning_rate": 1.9412138155154e-05,
      "loss": 0.2817,
      "step": 107
    },
    {
      "epoch": 0.44126659856996936,
      "grad_norm": 0.375505656003952,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.2882,
      "step": 108
    },
    {
      "epoch": 0.4453524004085802,
      "grad_norm": 0.406118780374527,
      "learning_rate": 1.9381526064466995e-05,
      "loss": 0.2861,
      "step": 109
    },
    {
      "epoch": 0.449438202247191,
      "grad_norm": 0.3882409334182739,
      "learning_rate": 1.9365938033402715e-05,
      "loss": 0.261,
      "step": 110
    },
    {
      "epoch": 0.45352400408580185,
      "grad_norm": 0.4351583421230316,
      "learning_rate": 1.9350162426854152e-05,
      "loss": 0.3014,
      "step": 111
    },
    {
      "epoch": 0.4576098059244127,
      "grad_norm": 0.3621097505092621,
      "learning_rate": 1.933419956076584e-05,
      "loss": 0.2728,
      "step": 112
    },
    {
      "epoch": 0.4616956077630235,
      "grad_norm": 0.3881032466888428,
      "learning_rate": 1.9318049754832656e-05,
      "loss": 0.2736,
      "step": 113
    },
    {
      "epoch": 0.46578140960163433,
      "grad_norm": 0.37627285718917847,
      "learning_rate": 1.9301713332493386e-05,
      "loss": 0.2707,
      "step": 114
    },
    {
      "epoch": 0.4698672114402451,
      "grad_norm": 0.4285913109779358,
      "learning_rate": 1.9285190620924267e-05,
      "loss": 0.2815,
      "step": 115
    },
    {
      "epoch": 0.473953013278856,
      "grad_norm": 0.35718926787376404,
      "learning_rate": 1.926848195103242e-05,
      "loss": 0.2621,
      "step": 116
    },
    {
      "epoch": 0.4780388151174668,
      "grad_norm": 0.3852044641971588,
      "learning_rate": 1.925158765744924e-05,
      "loss": 0.283,
      "step": 117
    },
    {
      "epoch": 0.4821246169560776,
      "grad_norm": 0.3884032368659973,
      "learning_rate": 1.923450807852367e-05,
      "loss": 0.2711,
      "step": 118
    },
    {
      "epoch": 0.48621041879468846,
      "grad_norm": 0.4398249685764313,
      "learning_rate": 1.9217243556315445e-05,
      "loss": 0.2757,
      "step": 119
    },
    {
      "epoch": 0.4902962206332993,
      "grad_norm": 0.36689624190330505,
      "learning_rate": 1.9199794436588244e-05,
      "loss": 0.2669,
      "step": 120
    },
    {
      "epoch": 0.4943820224719101,
      "grad_norm": 0.46398666501045227,
      "learning_rate": 1.9182161068802742e-05,
      "loss": 0.2683,
      "step": 121
    },
    {
      "epoch": 0.49846782431052095,
      "grad_norm": 0.40020987391471863,
      "learning_rate": 1.916434380610963e-05,
      "loss": 0.2927,
      "step": 122
    },
    {
      "epoch": 0.5025536261491318,
      "grad_norm": 0.4032459259033203,
      "learning_rate": 1.9146343005342546e-05,
      "loss": 0.31,
      "step": 123
    },
    {
      "epoch": 0.5066394279877426,
      "grad_norm": 0.44166550040245056,
      "learning_rate": 1.912815902701091e-05,
      "loss": 0.2842,
      "step": 124
    },
    {
      "epoch": 0.5107252298263534,
      "grad_norm": 0.39895153045654297,
      "learning_rate": 1.9109792235292715e-05,
      "loss": 0.2766,
      "step": 125
    },
    {
      "epoch": 0.5148110316649642,
      "grad_norm": 0.3415013253688812,
      "learning_rate": 1.909124299802724e-05,
      "loss": 0.2761,
      "step": 126
    },
    {
      "epoch": 0.5188968335035751,
      "grad_norm": 0.3837663531303406,
      "learning_rate": 1.9072511686707663e-05,
      "loss": 0.2797,
      "step": 127
    },
    {
      "epoch": 0.5229826353421859,
      "grad_norm": 0.4030819833278656,
      "learning_rate": 1.9053598676473656e-05,
      "loss": 0.2932,
      "step": 128
    },
    {
      "epoch": 0.5270684371807968,
      "grad_norm": 0.40120938420295715,
      "learning_rate": 1.9034504346103825e-05,
      "loss": 0.2698,
      "step": 129
    },
    {
      "epoch": 0.5311542390194075,
      "grad_norm": 0.3621327579021454,
      "learning_rate": 1.9015229078008163e-05,
      "loss": 0.298,
      "step": 130
    },
    {
      "epoch": 0.5352400408580184,
      "grad_norm": 0.33476150035858154,
      "learning_rate": 1.8995773258220374e-05,
      "loss": 0.2612,
      "step": 131
    },
    {
      "epoch": 0.5393258426966292,
      "grad_norm": 0.3523140549659729,
      "learning_rate": 1.8976137276390145e-05,
      "loss": 0.2671,
      "step": 132
    },
    {
      "epoch": 0.54341164453524,
      "grad_norm": 0.3624558746814728,
      "learning_rate": 1.8956321525775337e-05,
      "loss": 0.2687,
      "step": 133
    },
    {
      "epoch": 0.5474974463738509,
      "grad_norm": 0.35892072319984436,
      "learning_rate": 1.8936326403234125e-05,
      "loss": 0.2755,
      "step": 134
    },
    {
      "epoch": 0.5515832482124617,
      "grad_norm": 0.3678256869316101,
      "learning_rate": 1.891615230921703e-05,
      "loss": 0.278,
      "step": 135
    },
    {
      "epoch": 0.5556690500510725,
      "grad_norm": 0.38125160336494446,
      "learning_rate": 1.8895799647758912e-05,
      "loss": 0.2765,
      "step": 136
    },
    {
      "epoch": 0.5597548518896833,
      "grad_norm": 0.40152257680892944,
      "learning_rate": 1.8875268826470875e-05,
      "loss": 0.3239,
      "step": 137
    },
    {
      "epoch": 0.5638406537282942,
      "grad_norm": 0.3935178816318512,
      "learning_rate": 1.8854560256532098e-05,
      "loss": 0.2956,
      "step": 138
    },
    {
      "epoch": 0.567926455566905,
      "grad_norm": 0.4389478266239166,
      "learning_rate": 1.8833674352681613e-05,
      "loss": 0.2968,
      "step": 139
    },
    {
      "epoch": 0.5720122574055159,
      "grad_norm": 0.3884355127811432,
      "learning_rate": 1.881261153320999e-05,
      "loss": 0.3074,
      "step": 140
    },
    {
      "epoch": 0.5760980592441267,
      "grad_norm": 0.4054373502731323,
      "learning_rate": 1.879137221995095e-05,
      "loss": 0.2996,
      "step": 141
    },
    {
      "epoch": 0.5801838610827375,
      "grad_norm": 0.4423893690109253,
      "learning_rate": 1.8769956838272937e-05,
      "loss": 0.3082,
      "step": 142
    },
    {
      "epoch": 0.5842696629213483,
      "grad_norm": 0.42978307604789734,
      "learning_rate": 1.8748365817070586e-05,
      "loss": 0.2878,
      "step": 143
    },
    {
      "epoch": 0.5883554647599591,
      "grad_norm": 0.38182228803634644,
      "learning_rate": 1.8726599588756144e-05,
      "loss": 0.2649,
      "step": 144
    },
    {
      "epoch": 0.59244126659857,
      "grad_norm": 0.43477413058280945,
      "learning_rate": 1.8704658589250795e-05,
      "loss": 0.271,
      "step": 145
    },
    {
      "epoch": 0.5965270684371808,
      "grad_norm": 0.3876926898956299,
      "learning_rate": 1.868254325797594e-05,
      "loss": 0.2804,
      "step": 146
    },
    {
      "epoch": 0.6006128702757916,
      "grad_norm": 0.39310601353645325,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.2767,
      "step": 147
    },
    {
      "epoch": 0.6046986721144024,
      "grad_norm": 0.421290785074234,
      "learning_rate": 1.8637791375251505e-05,
      "loss": 0.2668,
      "step": 148
    },
    {
      "epoch": 0.6087844739530133,
      "grad_norm": 0.450023353099823,
      "learning_rate": 1.8615155720066247e-05,
      "loss": 0.2888,
      "step": 149
    },
    {
      "epoch": 0.6128702757916241,
      "grad_norm": 0.3645341396331787,
      "learning_rate": 1.859234752562217e-05,
      "loss": 0.2828,
      "step": 150
    },
    {
      "epoch": 0.616956077630235,
      "grad_norm": 0.41853606700897217,
      "learning_rate": 1.8569367248708343e-05,
      "loss": 0.284,
      "step": 151
    },
    {
      "epoch": 0.6210418794688458,
      "grad_norm": 0.3675737679004669,
      "learning_rate": 1.8546215349560204e-05,
      "loss": 0.2933,
      "step": 152
    },
    {
      "epoch": 0.6251276813074566,
      "grad_norm": 0.3668256998062134,
      "learning_rate": 1.8522892291850335e-05,
      "loss": 0.2729,
      "step": 153
    },
    {
      "epoch": 0.6292134831460674,
      "grad_norm": 0.34576019644737244,
      "learning_rate": 1.849939854267919e-05,
      "loss": 0.2612,
      "step": 154
    },
    {
      "epoch": 0.6332992849846782,
      "grad_norm": 0.41370126605033875,
      "learning_rate": 1.847573457256571e-05,
      "loss": 0.2693,
      "step": 155
    },
    {
      "epoch": 0.6373850868232891,
      "grad_norm": 0.4205566644668579,
      "learning_rate": 1.845190085543795e-05,
      "loss": 0.2746,
      "step": 156
    },
    {
      "epoch": 0.6414708886618999,
      "grad_norm": 0.3997614085674286,
      "learning_rate": 1.8427897868623535e-05,
      "loss": 0.2813,
      "step": 157
    },
    {
      "epoch": 0.6455566905005107,
      "grad_norm": 0.41005200147628784,
      "learning_rate": 1.840372609284013e-05,
      "loss": 0.2647,
      "step": 158
    },
    {
      "epoch": 0.6496424923391215,
      "grad_norm": 0.4547550678253174,
      "learning_rate": 1.8379386012185813e-05,
      "loss": 0.2791,
      "step": 159
    },
    {
      "epoch": 0.6537282941777324,
      "grad_norm": 0.4075047969818115,
      "learning_rate": 1.8354878114129368e-05,
      "loss": 0.2769,
      "step": 160
    },
    {
      "epoch": 0.6578140960163432,
      "grad_norm": 0.37060046195983887,
      "learning_rate": 1.8330202889500518e-05,
      "loss": 0.3028,
      "step": 161
    },
    {
      "epoch": 0.6618998978549541,
      "grad_norm": 0.35541340708732605,
      "learning_rate": 1.8305360832480118e-05,
      "loss": 0.2981,
      "step": 162
    },
    {
      "epoch": 0.6659856996935649,
      "grad_norm": 0.3970625400543213,
      "learning_rate": 1.8280352440590236e-05,
      "loss": 0.2634,
      "step": 163
    },
    {
      "epoch": 0.6700715015321757,
      "grad_norm": 0.4075865149497986,
      "learning_rate": 1.82551782146842e-05,
      "loss": 0.3027,
      "step": 164
    },
    {
      "epoch": 0.6700715015321757,
      "eval_loss": 0.291363924741745,
      "eval_runtime": 5.7936,
      "eval_samples_per_second": 13.636,
      "eval_steps_per_second": 1.726,
      "step": 164
    },
    {
      "epoch": 0.6741573033707865,
      "grad_norm": 0.34390076994895935,
      "learning_rate": 1.8229838658936566e-05,
      "loss": 0.2536,
      "step": 165
    },
    {
      "epoch": 0.6782431052093973,
      "grad_norm": 0.3729197084903717,
      "learning_rate": 1.8204334280833005e-05,
      "loss": 0.2739,
      "step": 166
    },
    {
      "epoch": 0.6823289070480082,
      "grad_norm": 0.3974601924419403,
      "learning_rate": 1.817866559116017e-05,
      "loss": 0.2858,
      "step": 167
    },
    {
      "epoch": 0.686414708886619,
      "grad_norm": 0.3424644470214844,
      "learning_rate": 1.8152833103995443e-05,
      "loss": 0.2305,
      "step": 168
    },
    {
      "epoch": 0.6905005107252298,
      "grad_norm": 0.4293709397315979,
      "learning_rate": 1.8126837336696645e-05,
      "loss": 0.3179,
      "step": 169
    },
    {
      "epoch": 0.6945863125638406,
      "grad_norm": 0.3259459435939789,
      "learning_rate": 1.8100678809891668e-05,
      "loss": 0.2589,
      "step": 170
    },
    {
      "epoch": 0.6986721144024515,
      "grad_norm": 0.40771302580833435,
      "learning_rate": 1.807435804746807e-05,
      "loss": 0.2637,
      "step": 171
    },
    {
      "epoch": 0.7027579162410623,
      "grad_norm": 0.3847212493419647,
      "learning_rate": 1.8047875576562556e-05,
      "loss": 0.2782,
      "step": 172
    },
    {
      "epoch": 0.7068437180796732,
      "grad_norm": 0.35547974705696106,
      "learning_rate": 1.802123192755044e-05,
      "loss": 0.2695,
      "step": 173
    },
    {
      "epoch": 0.710929519918284,
      "grad_norm": 0.3954298198223114,
      "learning_rate": 1.7994427634035016e-05,
      "loss": 0.3005,
      "step": 174
    },
    {
      "epoch": 0.7150153217568948,
      "grad_norm": 0.3506409525871277,
      "learning_rate": 1.796746323283686e-05,
      "loss": 0.2716,
      "step": 175
    },
    {
      "epoch": 0.7191011235955056,
      "grad_norm": 0.42227277159690857,
      "learning_rate": 1.7940339263983112e-05,
      "loss": 0.2915,
      "step": 176
    },
    {
      "epoch": 0.7231869254341164,
      "grad_norm": 0.3948259949684143,
      "learning_rate": 1.791305627069662e-05,
      "loss": 0.2883,
      "step": 177
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.3580792248249054,
      "learning_rate": 1.7885614799385086e-05,
      "loss": 0.2782,
      "step": 178
    },
    {
      "epoch": 0.7313585291113381,
      "grad_norm": 0.39698660373687744,
      "learning_rate": 1.785801539963012e-05,
      "loss": 0.2657,
      "step": 179
    },
    {
      "epoch": 0.7354443309499489,
      "grad_norm": 0.3663792610168457,
      "learning_rate": 1.7830258624176224e-05,
      "loss": 0.2686,
      "step": 180
    },
    {
      "epoch": 0.7395301327885597,
      "grad_norm": 0.38216930627822876,
      "learning_rate": 1.7802345028919728e-05,
      "loss": 0.2706,
      "step": 181
    },
    {
      "epoch": 0.7436159346271706,
      "grad_norm": 0.4187450706958771,
      "learning_rate": 1.777427517289766e-05,
      "loss": 0.2573,
      "step": 182
    },
    {
      "epoch": 0.7477017364657814,
      "grad_norm": 0.34619036316871643,
      "learning_rate": 1.7746049618276545e-05,
      "loss": 0.269,
      "step": 183
    },
    {
      "epoch": 0.7517875383043923,
      "grad_norm": 0.35370582342147827,
      "learning_rate": 1.7717668930341152e-05,
      "loss": 0.2552,
      "step": 184
    },
    {
      "epoch": 0.7558733401430031,
      "grad_norm": 0.4264880418777466,
      "learning_rate": 1.768913367748316e-05,
      "loss": 0.2952,
      "step": 185
    },
    {
      "epoch": 0.7599591419816139,
      "grad_norm": 0.39135676622390747,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.2661,
      "step": 186
    },
    {
      "epoch": 0.7640449438202247,
      "grad_norm": 0.39061596989631653,
      "learning_rate": 1.7631601766032337e-05,
      "loss": 0.2737,
      "step": 187
    },
    {
      "epoch": 0.7681307456588355,
      "grad_norm": 0.3799816966056824,
      "learning_rate": 1.7602606259654704e-05,
      "loss": 0.2767,
      "step": 188
    },
    {
      "epoch": 0.7722165474974464,
      "grad_norm": 0.3592148721218109,
      "learning_rate": 1.7573458492761802e-05,
      "loss": 0.2448,
      "step": 189
    },
    {
      "epoch": 0.7763023493360572,
      "grad_norm": 0.39084604382514954,
      "learning_rate": 1.7544159049107902e-05,
      "loss": 0.275,
      "step": 190
    },
    {
      "epoch": 0.780388151174668,
      "grad_norm": 0.36443451046943665,
      "learning_rate": 1.7514708515485002e-05,
      "loss": 0.2645,
      "step": 191
    },
    {
      "epoch": 0.7844739530132788,
      "grad_norm": 0.4001200497150421,
      "learning_rate": 1.7485107481711014e-05,
      "loss": 0.2724,
      "step": 192
    },
    {
      "epoch": 0.7885597548518897,
      "grad_norm": 0.39093396067619324,
      "learning_rate": 1.7455356540617988e-05,
      "loss": 0.2712,
      "step": 193
    },
    {
      "epoch": 0.7926455566905005,
      "grad_norm": 0.3430577218532562,
      "learning_rate": 1.7425456288040236e-05,
      "loss": 0.2489,
      "step": 194
    },
    {
      "epoch": 0.7967313585291114,
      "grad_norm": 0.3573733866214752,
      "learning_rate": 1.7395407322802374e-05,
      "loss": 0.2696,
      "step": 195
    },
    {
      "epoch": 0.8008171603677222,
      "grad_norm": 0.38158077001571655,
      "learning_rate": 1.736521024670737e-05,
      "loss": 0.2814,
      "step": 196
    },
    {
      "epoch": 0.804902962206333,
      "grad_norm": 0.366470068693161,
      "learning_rate": 1.733486566452446e-05,
      "loss": 0.2529,
      "step": 197
    },
    {
      "epoch": 0.8089887640449438,
      "grad_norm": 0.3718278408050537,
      "learning_rate": 1.7304374183977032e-05,
      "loss": 0.2747,
      "step": 198
    },
    {
      "epoch": 0.8130745658835546,
      "grad_norm": 0.3395809233188629,
      "learning_rate": 1.7273736415730488e-05,
      "loss": 0.2693,
      "step": 199
    },
    {
      "epoch": 0.8171603677221655,
      "grad_norm": 0.307731032371521,
      "learning_rate": 1.7242952973379983e-05,
      "loss": 0.2081,
      "step": 200
    },
    {
      "epoch": 0.8212461695607763,
      "grad_norm": 0.3522433936595917,
      "learning_rate": 1.7212024473438145e-05,
      "loss": 0.2495,
      "step": 201
    },
    {
      "epoch": 0.8253319713993871,
      "grad_norm": 0.35946980118751526,
      "learning_rate": 1.7180951535322742e-05,
      "loss": 0.2702,
      "step": 202
    },
    {
      "epoch": 0.8294177732379979,
      "grad_norm": 0.3933047950267792,
      "learning_rate": 1.7149734781344247e-05,
      "loss": 0.2629,
      "step": 203
    },
    {
      "epoch": 0.8335035750766088,
      "grad_norm": 0.3658384084701538,
      "learning_rate": 1.7118374836693407e-05,
      "loss": 0.2538,
      "step": 204
    },
    {
      "epoch": 0.8375893769152196,
      "grad_norm": 0.3532220423221588,
      "learning_rate": 1.7086872329428702e-05,
      "loss": 0.2587,
      "step": 205
    },
    {
      "epoch": 0.8416751787538305,
      "grad_norm": 0.3619686961174011,
      "learning_rate": 1.705522789046377e-05,
      "loss": 0.2658,
      "step": 206
    },
    {
      "epoch": 0.8457609805924413,
      "grad_norm": 0.4083801209926605,
      "learning_rate": 1.7023442153554776e-05,
      "loss": 0.2614,
      "step": 207
    },
    {
      "epoch": 0.849846782431052,
      "grad_norm": 0.3868924081325531,
      "learning_rate": 1.6991515755287715e-05,
      "loss": 0.2831,
      "step": 208
    },
    {
      "epoch": 0.8539325842696629,
      "grad_norm": 0.38413897156715393,
      "learning_rate": 1.695944933506567e-05,
      "loss": 0.2596,
      "step": 209
    },
    {
      "epoch": 0.8580183861082737,
      "grad_norm": 0.34999531507492065,
      "learning_rate": 1.6927243535095995e-05,
      "loss": 0.2842,
      "step": 210
    },
    {
      "epoch": 0.8621041879468846,
      "grad_norm": 0.328204482793808,
      "learning_rate": 1.6894899000377462e-05,
      "loss": 0.2332,
      "step": 211
    },
    {
      "epoch": 0.8661899897854954,
      "grad_norm": 0.3802552819252014,
      "learning_rate": 1.686241637868734e-05,
      "loss": 0.2709,
      "step": 212
    },
    {
      "epoch": 0.8702757916241062,
      "grad_norm": 0.35758858919143677,
      "learning_rate": 1.6829796320568416e-05,
      "loss": 0.279,
      "step": 213
    },
    {
      "epoch": 0.874361593462717,
      "grad_norm": 0.3561984896659851,
      "learning_rate": 1.6797039479315994e-05,
      "loss": 0.2868,
      "step": 214
    },
    {
      "epoch": 0.8784473953013279,
      "grad_norm": 0.32591065764427185,
      "learning_rate": 1.6764146510964762e-05,
      "loss": 0.2485,
      "step": 215
    },
    {
      "epoch": 0.8825331971399387,
      "grad_norm": 0.36409640312194824,
      "learning_rate": 1.67311180742757e-05,
      "loss": 0.2577,
      "step": 216
    },
    {
      "epoch": 0.8866189989785496,
      "grad_norm": 0.34685492515563965,
      "learning_rate": 1.669795483072287e-05,
      "loss": 0.247,
      "step": 217
    },
    {
      "epoch": 0.8907048008171604,
      "grad_norm": 0.3445712625980377,
      "learning_rate": 1.6664657444480145e-05,
      "loss": 0.2565,
      "step": 218
    },
    {
      "epoch": 0.8947906026557712,
      "grad_norm": 0.34710460901260376,
      "learning_rate": 1.6631226582407954e-05,
      "loss": 0.2363,
      "step": 219
    },
    {
      "epoch": 0.898876404494382,
      "grad_norm": 0.33726766705513,
      "learning_rate": 1.6597662914039885e-05,
      "loss": 0.2483,
      "step": 220
    },
    {
      "epoch": 0.9029622063329928,
      "grad_norm": 0.34024032950401306,
      "learning_rate": 1.65639671115693e-05,
      "loss": 0.2474,
      "step": 221
    },
    {
      "epoch": 0.9070480081716037,
      "grad_norm": 0.38807395100593567,
      "learning_rate": 1.653013984983585e-05,
      "loss": 0.2726,
      "step": 222
    },
    {
      "epoch": 0.9111338100102145,
      "grad_norm": 0.36375290155410767,
      "learning_rate": 1.6496181806312005e-05,
      "loss": 0.2726,
      "step": 223
    },
    {
      "epoch": 0.9152196118488254,
      "grad_norm": 0.36927178502082825,
      "learning_rate": 1.6462093661089432e-05,
      "loss": 0.2518,
      "step": 224
    },
    {
      "epoch": 0.9193054136874361,
      "grad_norm": 0.3809269070625305,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.2449,
      "step": 225
    },
    {
      "epoch": 0.923391215526047,
      "grad_norm": 0.34634968638420105,
      "learning_rate": 1.6393529798929103e-05,
      "loss": 0.2575,
      "step": 226
    },
    {
      "epoch": 0.9274770173646578,
      "grad_norm": 0.33054831624031067,
      "learning_rate": 1.635905545514795e-05,
      "loss": 0.2639,
      "step": 227
    },
    {
      "epoch": 0.9315628192032687,
      "grad_norm": 0.35482174158096313,
      "learning_rate": 1.6324453755953772e-05,
      "loss": 0.2667,
      "step": 228
    },
    {
      "epoch": 0.9356486210418795,
      "grad_norm": 0.3657509684562683,
      "learning_rate": 1.6289725394328998e-05,
      "loss": 0.255,
      "step": 229
    },
    {
      "epoch": 0.9397344228804902,
      "grad_norm": 0.3343275785446167,
      "learning_rate": 1.6254871065792776e-05,
      "loss": 0.2336,
      "step": 230
    },
    {
      "epoch": 0.9438202247191011,
      "grad_norm": 0.3493170142173767,
      "learning_rate": 1.621989146838704e-05,
      "loss": 0.2649,
      "step": 231
    },
    {
      "epoch": 0.947906026557712,
      "grad_norm": 0.3305867612361908,
      "learning_rate": 1.618478730266255e-05,
      "loss": 0.2767,
      "step": 232
    },
    {
      "epoch": 0.9519918283963228,
      "grad_norm": 0.35817259550094604,
      "learning_rate": 1.6149559271664835e-05,
      "loss": 0.2817,
      "step": 233
    },
    {
      "epoch": 0.9560776302349336,
      "grad_norm": 0.37733370065689087,
      "learning_rate": 1.6114208080920125e-05,
      "loss": 0.2809,
      "step": 234
    },
    {
      "epoch": 0.9601634320735445,
      "grad_norm": 0.3227766156196594,
      "learning_rate": 1.607873443842122e-05,
      "loss": 0.2545,
      "step": 235
    },
    {
      "epoch": 0.9642492339121552,
      "grad_norm": 0.3445710241794586,
      "learning_rate": 1.6043139054613326e-05,
      "loss": 0.2476,
      "step": 236
    },
    {
      "epoch": 0.9683350357507661,
      "grad_norm": 0.3375508785247803,
      "learning_rate": 1.600742264237979e-05,
      "loss": 0.2502,
      "step": 237
    },
    {
      "epoch": 0.9724208375893769,
      "grad_norm": 0.356039434671402,
      "learning_rate": 1.5971585917027864e-05,
      "loss": 0.268,
      "step": 238
    },
    {
      "epoch": 0.9765066394279878,
      "grad_norm": 0.34852373600006104,
      "learning_rate": 1.5935629596274345e-05,
      "loss": 0.2605,
      "step": 239
    },
    {
      "epoch": 0.9805924412665986,
      "grad_norm": 0.3376101851463318,
      "learning_rate": 1.5899554400231233e-05,
      "loss": 0.2567,
      "step": 240
    },
    {
      "epoch": 0.9846782431052093,
      "grad_norm": 0.32361170649528503,
      "learning_rate": 1.586336105139127e-05,
      "loss": 0.2481,
      "step": 241
    },
    {
      "epoch": 0.9887640449438202,
      "grad_norm": 0.35558903217315674,
      "learning_rate": 1.5827050274613512e-05,
      "loss": 0.2514,
      "step": 242
    },
    {
      "epoch": 0.992849846782431,
      "grad_norm": 0.31636619567871094,
      "learning_rate": 1.579062279710879e-05,
      "loss": 0.2237,
      "step": 243
    },
    {
      "epoch": 0.9969356486210419,
      "grad_norm": 0.3540779948234558,
      "learning_rate": 1.5754079348425137e-05,
      "loss": 0.2381,
      "step": 244
    }
  ],
  "logging_steps": 1,
  "max_steps": 732,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 244,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.127797840707584e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}