{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9984301412872841,
  "eval_steps": 500,
  "global_step": 477,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0020931449502878076,
      "grad_norm": 3.2714715375601737,
      "learning_rate": 1.0416666666666666e-08,
      "logits/chosen": -1.3119012117385864,
      "logits/rejected": -1.3116823434829712,
      "logps/chosen": -358.818603515625,
      "logps/rejected": -361.4830322265625,
      "loss": 0.6931,
      "rewards/accuracies": 0.0,
      "rewards/chosen": 0.0,
      "rewards/margins": 0.0,
      "rewards/rejected": 0.0,
      "step": 1
    },
    {
      "epoch": 0.020931449502878074,
      "grad_norm": 3.1981499549192196,
      "learning_rate": 1.0416666666666667e-07,
      "logits/chosen": -1.2483385801315308,
      "logits/rejected": -1.2390522956848145,
      "logps/chosen": -300.1659851074219,
      "logps/rejected": -278.4343566894531,
      "loss": 0.6932,
      "rewards/accuracies": 0.4375,
      "rewards/chosen": 0.0003463309840299189,
      "rewards/margins": 0.00022707899915985763,
      "rewards/rejected": 0.0001192519994219765,
      "step": 10
    },
    {
      "epoch": 0.04186289900575615,
      "grad_norm": 2.9666481993391387,
      "learning_rate": 2.0833333333333333e-07,
      "logits/chosen": -1.2118756771087646,
      "logits/rejected": -1.2122180461883545,
      "logps/chosen": -312.6712951660156,
      "logps/rejected": -281.015869140625,
      "loss": 0.6932,
      "rewards/accuracies": 0.4749999940395355,
      "rewards/chosen": 0.0008083735592663288,
      "rewards/margins": -0.0004832709673792124,
      "rewards/rejected": 0.0012916445266455412,
      "step": 20
    },
    {
      "epoch": 0.06279434850863422,
      "grad_norm": 3.032286414272413,
      "learning_rate": 3.1249999999999997e-07,
      "logits/chosen": -1.22763991355896,
      "logits/rejected": -1.2292324304580688,
      "logps/chosen": -289.8343811035156,
      "logps/rejected": -250.0952911376953,
      "loss": 0.6921,
      "rewards/accuracies": 0.5687500238418579,
      "rewards/chosen": 0.007961934432387352,
      "rewards/margins": 0.0013618591474369168,
      "rewards/rejected": 0.006600075867027044,
      "step": 30
    },
    {
      "epoch": 0.0837257980115123,
      "grad_norm": 3.0530878838982813,
      "learning_rate": 4.1666666666666667e-07,
      "logits/chosen": -1.2131128311157227,
      "logits/rejected": -1.2069206237792969,
      "logps/chosen": -258.03167724609375,
      "logps/rejected": -260.1754150390625,
      "loss": 0.6903,
      "rewards/accuracies": 0.612500011920929,
      "rewards/chosen": 0.022502344101667404,
      "rewards/margins": 0.004317447543144226,
      "rewards/rejected": 0.018184896558523178,
      "step": 40
    },
    {
      "epoch": 0.10465724751439037,
      "grad_norm": 3.05177048406134,
      "learning_rate": 4.999731868769026e-07,
      "logits/chosen": -1.200819730758667,
      "logits/rejected": -1.1967626810073853,
      "logps/chosen": -285.07757568359375,
      "logps/rejected": -270.8570556640625,
      "loss": 0.6863,
      "rewards/accuracies": 0.6625000238418579,
      "rewards/chosen": 0.04324551671743393,
      "rewards/margins": 0.016647540032863617,
      "rewards/rejected": 0.026597972959280014,
      "step": 50
    },
    {
      "epoch": 0.12558869701726844,
      "grad_norm": 3.2417202808010526,
      "learning_rate": 4.990353313429303e-07,
      "logits/chosen": -1.2190834283828735,
      "logits/rejected": -1.21430242061615,
      "logps/chosen": -250.82080078125,
      "logps/rejected": -243.12191772460938,
      "loss": 0.6803,
      "rewards/accuracies": 0.7749999761581421,
      "rewards/chosen": 0.06585284322500229,
      "rewards/margins": 0.030171776190400124,
      "rewards/rejected": 0.035681065171957016,
      "step": 60
    },
    {
      "epoch": 0.14652014652014653,
      "grad_norm": 2.9783130818034245,
      "learning_rate": 4.967625656594781e-07,
      "logits/chosen": -1.2192996740341187,
      "logits/rejected": -1.2149441242218018,
      "logps/chosen": -292.11279296875,
      "logps/rejected": -281.4046325683594,
      "loss": 0.6726,
      "rewards/accuracies": 0.699999988079071,
      "rewards/chosen": 0.06224682927131653,
      "rewards/margins": 0.04184813052415848,
      "rewards/rejected": 0.020398706197738647,
      "step": 70
    },
    {
      "epoch": 0.1674515960230246,
      "grad_norm": 3.7566626119690905,
      "learning_rate": 4.93167072587771e-07,
      "logits/chosen": -1.2239179611206055,
      "logits/rejected": -1.2163951396942139,
      "logps/chosen": -324.00897216796875,
      "logps/rejected": -258.05609130859375,
      "loss": 0.6636,
      "rewards/accuracies": 0.7437499761581421,
      "rewards/chosen": 0.050625842064619064,
      "rewards/margins": 0.07805721461772919,
      "rewards/rejected": -0.027431374415755272,
      "step": 80
    },
    {
      "epoch": 0.18838304552590268,
      "grad_norm": 3.65927557077053,
      "learning_rate": 4.882681251368548e-07,
      "logits/chosen": -1.2626798152923584,
      "logits/rejected": -1.2355681657791138,
      "logps/chosen": -258.53778076171875,
      "logps/rejected": -272.76470947265625,
      "loss": 0.6454,
      "rewards/accuracies": 0.71875,
      "rewards/chosen": -0.028159942477941513,
      "rewards/margins": 0.10718291997909546,
      "rewards/rejected": -0.13534286618232727,
      "step": 90
    },
    {
      "epoch": 0.20931449502878074,
      "grad_norm": 3.9637480940035683,
      "learning_rate": 4.820919832540181e-07,
      "logits/chosen": -1.2693599462509155,
      "logits/rejected": -1.2746694087982178,
      "logps/chosen": -307.8084411621094,
      "logps/rejected": -307.66632080078125,
      "loss": 0.6333,
      "rewards/accuracies": 0.7437499761581421,
      "rewards/chosen": -0.07238290458917618,
      "rewards/margins": 0.1892419159412384,
      "rewards/rejected": -0.26162484288215637,
      "step": 100
    },
    {
      "epoch": 0.2302459445316588,
      "grad_norm": 5.324044543906808,
      "learning_rate": 4.7467175306295647e-07,
      "logits/chosen": -1.2339061498641968,
      "logits/rejected": -1.2281516790390015,
      "logps/chosen": -300.7259216308594,
      "logps/rejected": -303.48419189453125,
      "loss": 0.6256,
      "rewards/accuracies": 0.6812499761581421,
      "rewards/chosen": -0.11372830718755722,
      "rewards/margins": 0.19473466277122498,
      "rewards/rejected": -0.3084629774093628,
      "step": 110
    },
    {
      "epoch": 0.25117739403453687,
      "grad_norm": 5.868949121433513,
      "learning_rate": 4.6604720940421207e-07,
      "logits/chosen": -1.172503113746643,
      "logits/rejected": -1.194064974784851,
      "logps/chosen": -299.2550048828125,
      "logps/rejected": -319.26580810546875,
      "loss": 0.6076,
      "rewards/accuracies": 0.6875,
      "rewards/chosen": -0.3037038743495941,
      "rewards/margins": 0.24693575501441956,
      "rewards/rejected": -0.5506396293640137,
      "step": 120
    },
    {
      "epoch": 0.272108843537415,
      "grad_norm": 5.532495930770894,
      "learning_rate": 4.5626458262912735e-07,
      "logits/chosen": -1.2171634435653687,
      "logits/rejected": -1.2139819860458374,
      "logps/chosen": -306.67327880859375,
      "logps/rejected": -327.5093078613281,
      "loss": 0.5838,
      "rewards/accuracies": 0.731249988079071,
      "rewards/chosen": -0.25290900468826294,
      "rewards/margins": 0.29776304960250854,
      "rewards/rejected": -0.550672173500061,
      "step": 130
    },
    {
      "epoch": 0.29304029304029305,
      "grad_norm": 9.379379641862023,
      "learning_rate": 4.453763107901675e-07,
      "logits/chosen": -1.2662270069122314,
      "logits/rejected": -1.2556852102279663,
      "logps/chosen": -338.9728088378906,
      "logps/rejected": -327.0832824707031,
      "loss": 0.5827,
      "rewards/accuracies": 0.7437499761581421,
      "rewards/chosen": -0.18600435554981232,
      "rewards/margins": 0.3997423052787781,
      "rewards/rejected": -0.585746705532074,
      "step": 140
    },
    {
      "epoch": 0.3139717425431711,
      "grad_norm": 7.900278401061318,
      "learning_rate": 4.3344075855595097e-07,
      "logits/chosen": -1.2308932542800903,
      "logits/rejected": -1.2276499271392822,
      "logps/chosen": -320.3226318359375,
      "logps/rejected": -326.42791748046875,
      "loss": 0.582,
      "rewards/accuracies": 0.731249988079071,
      "rewards/chosen": -0.39666932821273804,
      "rewards/margins": 0.3853836953639984,
      "rewards/rejected": -0.7820531129837036,
      "step": 150
    },
    {
      "epoch": 0.3349031920460492,
      "grad_norm": 7.301380914088517,
      "learning_rate": 4.2052190435769554e-07,
      "logits/chosen": -1.2494581937789917,
      "logits/rejected": -1.2624256610870361,
      "logps/chosen": -312.92584228515625,
      "logps/rejected": -334.1939697265625,
      "loss": 0.5702,
      "rewards/accuracies": 0.71875,
      "rewards/chosen": -0.33961960673332214,
      "rewards/margins": 0.45542359352111816,
      "rewards/rejected": -0.7950432300567627,
      "step": 160
    },
    {
      "epoch": 0.35583464154892724,
      "grad_norm": 6.951907491069847,
      "learning_rate": 4.0668899744407567e-07,
      "logits/chosen": -1.1752383708953857,
      "logits/rejected": -1.176161766052246,
      "logps/chosen": -305.71905517578125,
      "logps/rejected": -318.2366638183594,
      "loss": 0.5717,
      "rewards/accuracies": 0.6812499761581421,
      "rewards/chosen": -0.44680055975914,
      "rewards/margins": 0.37692078948020935,
      "rewards/rejected": -0.8237212896347046,
      "step": 170
    },
    {
      "epoch": 0.37676609105180536,
      "grad_norm": 7.66796629301651,
      "learning_rate": 3.920161866827889e-07,
      "logits/chosen": -1.2374298572540283,
      "logits/rejected": -1.2232410907745361,
      "logps/chosen": -314.8545227050781,
      "logps/rejected": -330.78082275390625,
      "loss": 0.5482,
      "rewards/accuracies": 0.6937500238418579,
      "rewards/chosen": -0.42410340905189514,
      "rewards/margins": 0.39998775720596313,
      "rewards/rejected": -0.8240911364555359,
      "step": 180
    },
    {
      "epoch": 0.3976975405546834,
      "grad_norm": 9.347951343579599,
      "learning_rate": 3.765821230985757e-07,
      "logits/chosen": -1.2337291240692139,
      "logits/rejected": -1.2184447050094604,
      "logps/chosen": -306.25042724609375,
      "logps/rejected": -344.9998474121094,
      "loss": 0.5552,
      "rewards/accuracies": 0.7124999761581421,
      "rewards/chosen": -0.4029362201690674,
      "rewards/margins": 0.4247189164161682,
      "rewards/rejected": -0.8276551365852356,
      "step": 190
    },
    {
      "epoch": 0.4186289900575615,
      "grad_norm": 20.34015507630146,
      "learning_rate": 3.604695382782159e-07,
      "logits/chosen": -1.1851434707641602,
      "logits/rejected": -1.2023327350616455,
      "logps/chosen": -317.30950927734375,
      "logps/rejected": -374.68707275390625,
      "loss": 0.565,
      "rewards/accuracies": 0.668749988079071,
      "rewards/chosen": -0.6525951027870178,
      "rewards/margins": 0.36915016174316406,
      "rewards/rejected": -1.021745204925537,
      "step": 200
    },
    {
      "epoch": 0.43956043956043955,
      "grad_norm": 8.66046401969586,
      "learning_rate": 3.4376480090239047e-07,
      "logits/chosen": -1.2199736833572388,
      "logits/rejected": -1.2007920742034912,
      "logps/chosen": -373.19403076171875,
      "logps/rejected": -374.0740051269531,
      "loss": 0.5549,
      "rewards/accuracies": 0.731249988079071,
      "rewards/chosen": -0.6114795804023743,
      "rewards/margins": 0.5353660583496094,
      "rewards/rejected": -1.1468455791473389,
      "step": 210
    },
    {
      "epoch": 0.4604918890633176,
      "grad_norm": 10.308255360757213,
      "learning_rate": 3.265574537815398e-07,
      "logits/chosen": -1.18425714969635,
      "logits/rejected": -1.211212396621704,
      "logps/chosen": -276.52880859375,
      "logps/rejected": -339.9971008300781,
      "loss": 0.5522,
      "rewards/accuracies": 0.7250000238418579,
      "rewards/chosen": -0.42873048782348633,
      "rewards/margins": 0.5462521910667419,
      "rewards/rejected": -0.9749826192855835,
      "step": 220
    },
    {
      "epoch": 0.48142333856619574,
      "grad_norm": 12.741083745076377,
      "learning_rate": 3.0893973387735683e-07,
      "logits/chosen": -1.2262427806854248,
      "logits/rejected": -1.2177931070327759,
      "logps/chosen": -330.7462158203125,
      "logps/rejected": -376.4158630371094,
      "loss": 0.555,
      "rewards/accuracies": 0.793749988079071,
      "rewards/chosen": -0.7175010442733765,
      "rewards/margins": 0.6692829132080078,
      "rewards/rejected": -1.3867839574813843,
      "step": 230
    },
    {
      "epoch": 0.5023547880690737,
      "grad_norm": 9.964168424713547,
      "learning_rate": 2.910060778827554e-07,
      "logits/chosen": -1.229190468788147,
      "logits/rejected": -1.1953736543655396,
      "logps/chosen": -340.6610412597656,
      "logps/rejected": -378.64459228515625,
      "loss": 0.5203,
      "rewards/accuracies": 0.71875,
      "rewards/chosen": -0.5589435696601868,
      "rewards/margins": 0.5544083714485168,
      "rewards/rejected": -1.1133520603179932,
      "step": 240
    },
    {
      "epoch": 0.5232862375719518,
      "grad_norm": 9.304678892746303,
      "learning_rate": 2.7285261601056697e-07,
      "logits/chosen": -1.237926959991455,
      "logits/rejected": -1.202215313911438,
      "logps/chosen": -335.1034240722656,
      "logps/rejected": -362.30694580078125,
      "loss": 0.5384,
      "rewards/accuracies": 0.7749999761581421,
      "rewards/chosen": -0.5443297624588013,
      "rewards/margins": 0.6573392748832703,
      "rewards/rejected": -1.2016689777374268,
      "step": 250
    },
    {
      "epoch": 0.54421768707483,
      "grad_norm": 11.08571008923917,
      "learning_rate": 2.5457665670441937e-07,
      "logits/chosen": -1.2713990211486816,
      "logits/rejected": -1.28865385055542,
      "logps/chosen": -339.6799011230469,
      "logps/rejected": -381.6545104980469,
      "loss": 0.5364,
      "rewards/accuracies": 0.71875,
      "rewards/chosen": -0.793778121471405,
      "rewards/margins": 0.5738049745559692,
      "rewards/rejected": -1.367583155632019,
      "step": 260
    },
    {
      "epoch": 0.565149136577708,
      "grad_norm": 10.522304727677207,
      "learning_rate": 2.3627616503391812e-07,
      "logits/chosen": -1.226070761680603,
      "logits/rejected": -1.224416732788086,
      "logps/chosen": -342.8006591796875,
      "logps/rejected": -384.75091552734375,
      "loss": 0.5349,
      "rewards/accuracies": 0.7250000238418579,
      "rewards/chosen": -0.4287544786930084,
      "rewards/margins": 0.7136284112930298,
      "rewards/rejected": -1.1423828601837158,
      "step": 270
    },
    {
      "epoch": 0.5860805860805861,
      "grad_norm": 10.244786469121673,
      "learning_rate": 2.1804923757009882e-07,
      "logits/chosen": -1.1765474081039429,
      "logits/rejected": -1.1918376684188843,
      "logps/chosen": -294.80682373046875,
      "logps/rejected": -329.14312744140625,
      "loss": 0.5451,
      "rewards/accuracies": 0.6875,
      "rewards/chosen": -0.3686367869377136,
      "rewards/margins": 0.5905336737632751,
      "rewards/rejected": -0.9591705203056335,
      "step": 280
    },
    {
      "epoch": 0.6070120355834642,
      "grad_norm": 9.226838895321402,
      "learning_rate": 1.9999357655598891e-07,
      "logits/chosen": -1.2361958026885986,
      "logits/rejected": -1.2049939632415771,
      "logps/chosen": -316.0486145019531,
      "logps/rejected": -381.14862060546875,
      "loss": 0.5398,
      "rewards/accuracies": 0.71875,
      "rewards/chosen": -0.6723560690879822,
      "rewards/margins": 0.6183323860168457,
      "rewards/rejected": -1.2906882762908936,
      "step": 290
    },
    {
      "epoch": 0.6279434850863422,
      "grad_norm": 8.816959226535186,
      "learning_rate": 1.8220596619089573e-07,
      "logits/chosen": -1.209092140197754,
      "logits/rejected": -1.1889410018920898,
      "logps/chosen": -401.56768798828125,
      "logps/rejected": -416.4493713378906,
      "loss": 0.5146,
      "rewards/accuracies": 0.706250011920929,
      "rewards/chosen": -0.7396571040153503,
      "rewards/margins": 0.6182384490966797,
      "rewards/rejected": -1.3578956127166748,
      "step": 300
    },
    {
      "epoch": 0.6488749345892203,
      "grad_norm": 8.036248628388176,
      "learning_rate": 1.647817538357072e-07,
      "logits/chosen": -1.1924922466278076,
      "logits/rejected": -1.1694473028182983,
      "logps/chosen": -372.14892578125,
      "logps/rejected": -391.8318786621094,
      "loss": 0.5116,
      "rewards/accuracies": 0.7875000238418579,
      "rewards/chosen": -0.6560718417167664,
      "rewards/margins": 0.7323848009109497,
      "rewards/rejected": -1.3884565830230713,
      "step": 310
    },
    {
      "epoch": 0.6698063840920984,
      "grad_norm": 11.007790934706696,
      "learning_rate": 1.478143389201113e-07,
      "logits/chosen": -1.2555673122406006,
      "logits/rejected": -1.2262015342712402,
      "logps/chosen": -311.16534423828125,
      "logps/rejected": -354.5485534667969,
      "loss": 0.5076,
      "rewards/accuracies": 0.78125,
      "rewards/chosen": -0.5307902097702026,
      "rewards/margins": 0.7380278706550598,
      "rewards/rejected": -1.2688181400299072,
      "step": 320
    },
    {
      "epoch": 0.6907378335949764,
      "grad_norm": 10.593080489103407,
      "learning_rate": 1.3139467229135998e-07,
      "logits/chosen": -1.2314331531524658,
      "logits/rejected": -1.2257896661758423,
      "logps/chosen": -330.34393310546875,
      "logps/rejected": -386.2178039550781,
      "loss": 0.5312,
      "rewards/accuracies": 0.768750011920929,
      "rewards/chosen": -0.5327134728431702,
      "rewards/margins": 0.6293205618858337,
      "rewards/rejected": -1.162034034729004,
      "step": 330
    },
    {
      "epoch": 0.7116692830978545,
      "grad_norm": 9.439948657937165,
      "learning_rate": 1.1561076868822755e-07,
      "logits/chosen": -1.15424644947052,
      "logits/rejected": -1.16172456741333,
      "logps/chosen": -363.83575439453125,
      "logps/rejected": -402.63494873046875,
      "loss": 0.5245,
      "rewards/accuracies": 0.6875,
      "rewards/chosen": -0.6764324307441711,
      "rewards/margins": 0.6598538756370544,
      "rewards/rejected": -1.3362863063812256,
      "step": 340
    },
    {
      "epoch": 0.7326007326007326,
      "grad_norm": 9.012621184513685,
      "learning_rate": 1.0054723495346482e-07,
      "logits/chosen": -1.241715908050537,
      "logits/rejected": -1.2342867851257324,
      "logps/chosen": -327.93231201171875,
      "logps/rejected": -370.9751892089844,
      "loss": 0.5044,
      "rewards/accuracies": 0.7875000238418579,
      "rewards/chosen": -0.6473630666732788,
      "rewards/margins": 0.722842812538147,
      "rewards/rejected": -1.3702059984207153,
      "step": 350
    },
    {
      "epoch": 0.7535321821036107,
      "grad_norm": 9.66579260075727,
      "learning_rate": 8.628481651367875e-08,
      "logits/chosen": -1.2246743440628052,
      "logits/rejected": -1.2141767740249634,
      "logps/chosen": -381.3811950683594,
      "logps/rejected": -412.919921875,
      "loss": 0.5388,
      "rewards/accuracies": 0.768750011920929,
      "rewards/chosen": -0.6807909607887268,
      "rewards/margins": 0.7211862802505493,
      "rewards/rejected": -1.4019771814346313,
      "step": 360
    },
    {
      "epoch": 0.7744636316064888,
      "grad_norm": 8.633284456509573,
      "learning_rate": 7.289996455765748e-08,
      "logits/chosen": -1.2712879180908203,
      "logits/rejected": -1.254762053489685,
      "logps/chosen": -322.0973205566406,
      "logps/rejected": -358.21942138671875,
      "loss": 0.5202,
      "rewards/accuracies": 0.7562500238418579,
      "rewards/chosen": -0.5800286531448364,
      "rewards/margins": 0.6660745739936829,
      "rewards/rejected": -1.246103286743164,
      "step": 370
    },
    {
      "epoch": 0.7953950811093669,
      "grad_norm": 9.854809153275779,
      "learning_rate": 6.046442623320145e-08,
      "logits/chosen": -1.1864761114120483,
      "logits/rejected": -1.1942225694656372,
      "logps/chosen": -318.14453125,
      "logps/rejected": -416.2574768066406,
      "loss": 0.5224,
      "rewards/accuracies": 0.7250000238418579,
      "rewards/chosen": -0.7182336449623108,
      "rewards/margins": 0.8087763786315918,
      "rewards/rejected": -1.5270097255706787,
      "step": 380
    },
    {
      "epoch": 0.8163265306122449,
      "grad_norm": 9.683226914416851,
      "learning_rate": 4.904486005914027e-08,
      "logits/chosen": -1.2469178438186646,
      "logits/rejected": -1.2045327425003052,
      "logps/chosen": -398.12994384765625,
      "logps/rejected": -434.2908630371094,
      "loss": 0.5144,
      "rewards/accuracies": 0.737500011920929,
      "rewards/chosen": -0.6139215230941772,
      "rewards/margins": 0.6766796112060547,
      "rewards/rejected": -1.290601134300232,
      "step": 390
    },
    {
      "epoch": 0.837257980115123,
      "grad_norm": 10.323841015921616,
      "learning_rate": 3.8702478614051345e-08,
      "logits/chosen": -1.2089258432388306,
      "logits/rejected": -1.2096412181854248,
      "logps/chosen": -322.91912841796875,
      "logps/rejected": -366.2901611328125,
      "loss": 0.5247,
      "rewards/accuracies": 0.75,
      "rewards/chosen": -0.593218982219696,
      "rewards/margins": 0.6565582752227783,
      "rewards/rejected": -1.2497771978378296,
      "step": 400
    },
    {
      "epoch": 0.858189429618001,
      "grad_norm": 8.925114203702107,
      "learning_rate": 2.9492720416985e-08,
      "logits/chosen": -1.2606794834136963,
      "logits/rejected": -1.2504216432571411,
      "logps/chosen": -372.19635009765625,
      "logps/rejected": -403.69183349609375,
      "loss": 0.5278,
      "rewards/accuracies": 0.762499988079071,
      "rewards/chosen": -0.6300618052482605,
      "rewards/margins": 0.7462152242660522,
      "rewards/rejected": -1.3762768507003784,
      "step": 410
    },
    {
      "epoch": 0.8791208791208791,
      "grad_norm": 7.917460029763406,
      "learning_rate": 2.1464952759020856e-08,
      "logits/chosen": -1.1264762878417969,
      "logits/rejected": -1.1589303016662598,
      "logps/chosen": -339.3796691894531,
      "logps/rejected": -417.29217529296875,
      "loss": 0.5154,
      "rewards/accuracies": 0.7437499761581421,
      "rewards/chosen": -0.7496095895767212,
      "rewards/margins": 0.7165287137031555,
      "rewards/rejected": -1.466138243675232,
      "step": 420
    },
    {
      "epoch": 0.9000523286237572,
      "grad_norm": 8.226733820840508,
      "learning_rate": 1.4662207078575684e-08,
      "logits/chosen": -1.1936299800872803,
      "logits/rejected": -1.1583229303359985,
      "logps/chosen": -357.48248291015625,
      "logps/rejected": -410.64349365234375,
      "loss": 0.4945,
      "rewards/accuracies": 0.762499988079071,
      "rewards/chosen": -0.6429892778396606,
      "rewards/margins": 0.7604572772979736,
      "rewards/rejected": -1.4034465551376343,
      "step": 430
    },
    {
      "epoch": 0.9209837781266352,
      "grad_norm": 9.626312612509446,
      "learning_rate": 9.12094829893642e-09,
      "logits/chosen": -1.2455865144729614,
      "logits/rejected": -1.222106695175171,
      "logps/chosen": -331.60198974609375,
      "logps/rejected": -350.9930419921875,
      "loss": 0.5179,
      "rewards/accuracies": 0.6625000238418579,
      "rewards/chosen": -0.6523444056510925,
      "rewards/margins": 0.5979769825935364,
      "rewards/rejected": -1.250321388244629,
      "step": 440
    },
    {
      "epoch": 0.9419152276295133,
      "grad_norm": 9.381121387990033,
      "learning_rate": 4.8708793644441086e-09,
      "logits/chosen": -1.1223390102386475,
      "logits/rejected": -1.1296846866607666,
      "logps/chosen": -343.0498962402344,
      "logps/rejected": -405.4967041015625,
      "loss": 0.5146,
      "rewards/accuracies": 0.75,
      "rewards/chosen": -0.70673668384552,
      "rewards/margins": 0.7429010272026062,
      "rewards/rejected": -1.449637770652771,
      "step": 450
    },
    {
      "epoch": 0.9628466771323915,
      "grad_norm": 10.172974951790811,
      "learning_rate": 1.9347820230782295e-09,
      "logits/chosen": -1.1808750629425049,
      "logits/rejected": -1.1635867357254028,
      "logps/chosen": -334.4978942871094,
      "logps/rejected": -361.5756530761719,
      "loss": 0.5223,
      "rewards/accuracies": 0.7562500238418579,
      "rewards/chosen": -0.6034250259399414,
      "rewards/margins": 0.697030782699585,
      "rewards/rejected": -1.3004556894302368,
      "step": 460
    },
    {
      "epoch": 0.9837781266352695,
      "grad_norm": 9.398707959258918,
      "learning_rate": 3.2839470889836627e-10,
      "logits/chosen": -1.2121164798736572,
      "logits/rejected": -1.1964446306228638,
      "logps/chosen": -357.72991943359375,
      "logps/rejected": -408.731689453125,
      "loss": 0.5015,
      "rewards/accuracies": 0.699999988079071,
      "rewards/chosen": -0.605668842792511,
      "rewards/margins": 0.6676869988441467,
      "rewards/rejected": -1.2733557224273682,
      "step": 470
    },
    {
      "epoch": 0.9984301412872841,
      "eval_logits/chosen": -1.218731164932251,
      "eval_logits/rejected": -1.20514976978302,
      "eval_logps/chosen": -331.9158630371094,
      "eval_logps/rejected": -413.128173828125,
      "eval_loss": 0.524560809135437,
      "eval_rewards/accuracies": 0.7890625,
      "eval_rewards/chosen": -0.5787502527236938,
      "eval_rewards/margins": 0.7770635485649109,
      "eval_rewards/rejected": -1.35581374168396,
      "eval_runtime": 228.1084,
      "eval_samples_per_second": 8.768,
      "eval_steps_per_second": 0.14,
      "step": 477
    },
    {
      "epoch": 0.9984301412872841,
      "step": 477,
      "total_flos": 0.0,
      "train_loss": 0.5676461645642167,
      "train_runtime": 13701.4312,
      "train_samples_per_second": 4.462,
      "train_steps_per_second": 0.035
    }
  ],
  "logging_steps": 10,
  "max_steps": 477,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}