{
  "best_metric": 0.7657567262649536,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.08241758241758242,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005494505494505495,
      "grad_norm": 10.20637035369873,
      "learning_rate": 1e-05,
      "loss": 1.7034,
      "step": 1
    },
    {
      "epoch": 0.0005494505494505495,
      "eval_loss": 2.4982120990753174,
      "eval_runtime": 246.1723,
      "eval_samples_per_second": 12.455,
      "eval_steps_per_second": 3.116,
      "step": 1
    },
    {
      "epoch": 0.001098901098901099,
      "grad_norm": 11.44153118133545,
      "learning_rate": 2e-05,
      "loss": 2.029,
      "step": 2
    },
    {
      "epoch": 0.0016483516483516484,
      "grad_norm": 11.39004135131836,
      "learning_rate": 3e-05,
      "loss": 1.9351,
      "step": 3
    },
    {
      "epoch": 0.002197802197802198,
      "grad_norm": 12.599936485290527,
      "learning_rate": 4e-05,
      "loss": 1.6733,
      "step": 4
    },
    {
      "epoch": 0.0027472527472527475,
      "grad_norm": 9.226388931274414,
      "learning_rate": 5e-05,
      "loss": 1.5285,
      "step": 5
    },
    {
      "epoch": 0.0032967032967032967,
      "grad_norm": 7.024893283843994,
      "learning_rate": 6e-05,
      "loss": 1.147,
      "step": 6
    },
    {
      "epoch": 0.0038461538461538464,
      "grad_norm": 8.669913291931152,
      "learning_rate": 7e-05,
      "loss": 1.2284,
      "step": 7
    },
    {
      "epoch": 0.004395604395604396,
      "grad_norm": 7.4318156242370605,
      "learning_rate": 8e-05,
      "loss": 0.9729,
      "step": 8
    },
    {
      "epoch": 0.004945054945054945,
      "grad_norm": 7.1173996925354,
      "learning_rate": 9e-05,
      "loss": 0.9166,
      "step": 9
    },
    {
      "epoch": 0.005494505494505495,
      "grad_norm": 7.444589138031006,
      "learning_rate": 0.0001,
      "loss": 1.1277,
      "step": 10
    },
    {
      "epoch": 0.006043956043956044,
      "grad_norm": 6.673343658447266,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.9009,
      "step": 11
    },
    {
      "epoch": 0.006593406593406593,
      "grad_norm": 5.152169704437256,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.9832,
      "step": 12
    },
    {
      "epoch": 0.007142857142857143,
      "grad_norm": 5.281370639801025,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.8636,
      "step": 13
    },
    {
      "epoch": 0.007692307692307693,
      "grad_norm": 4.680879592895508,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.8871,
      "step": 14
    },
    {
      "epoch": 0.008241758241758242,
      "grad_norm": 4.400333881378174,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.9835,
      "step": 15
    },
    {
      "epoch": 0.008791208791208791,
      "grad_norm": 5.351457595825195,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.8755,
      "step": 16
    },
    {
      "epoch": 0.00934065934065934,
      "grad_norm": 4.729643821716309,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.9769,
      "step": 17
    },
    {
      "epoch": 0.00989010989010989,
      "grad_norm": 4.137618064880371,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.846,
      "step": 18
    },
    {
      "epoch": 0.010439560439560439,
      "grad_norm": 4.284829139709473,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.8022,
      "step": 19
    },
    {
      "epoch": 0.01098901098901099,
      "grad_norm": 5.623097896575928,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.8643,
      "step": 20
    },
    {
      "epoch": 0.011538461538461539,
      "grad_norm": 4.570623397827148,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.8482,
      "step": 21
    },
    {
      "epoch": 0.012087912087912088,
      "grad_norm": 5.434322357177734,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.9303,
      "step": 22
    },
    {
      "epoch": 0.012637362637362638,
      "grad_norm": 5.448188781738281,
      "learning_rate": 9.884934153917997e-05,
      "loss": 1.0636,
      "step": 23
    },
    {
      "epoch": 0.013186813186813187,
      "grad_norm": 5.108726501464844,
      "learning_rate": 9.86663298624003e-05,
      "loss": 1.056,
      "step": 24
    },
    {
      "epoch": 0.013736263736263736,
      "grad_norm": 4.536986351013184,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.9214,
      "step": 25
    },
    {
      "epoch": 0.014285714285714285,
      "grad_norm": 4.402527809143066,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.712,
      "step": 26
    },
    {
      "epoch": 0.014835164835164835,
      "grad_norm": 4.430077075958252,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.9547,
      "step": 27
    },
    {
      "epoch": 0.015384615384615385,
      "grad_norm": 3.4363932609558105,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.7263,
      "step": 28
    },
    {
      "epoch": 0.015934065934065933,
      "grad_norm": 4.152787685394287,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.7906,
      "step": 29
    },
    {
      "epoch": 0.016483516483516484,
      "grad_norm": 4.174340724945068,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.9927,
      "step": 30
    },
    {
      "epoch": 0.01703296703296703,
      "grad_norm": 4.010305881500244,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.9093,
      "step": 31
    },
    {
      "epoch": 0.017582417582417582,
      "grad_norm": 3.8751678466796875,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.7975,
      "step": 32
    },
    {
      "epoch": 0.018131868131868133,
      "grad_norm": 3.810492992401123,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.8229,
      "step": 33
    },
    {
      "epoch": 0.01868131868131868,
      "grad_norm": 3.694744110107422,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.8752,
      "step": 34
    },
    {
      "epoch": 0.019230769230769232,
      "grad_norm": 3.96274733543396,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.6688,
      "step": 35
    },
    {
      "epoch": 0.01978021978021978,
      "grad_norm": 4.120517730712891,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.747,
      "step": 36
    },
    {
      "epoch": 0.02032967032967033,
      "grad_norm": 4.018519878387451,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.6797,
      "step": 37
    },
    {
      "epoch": 0.020879120879120878,
      "grad_norm": 4.266332626342773,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.8611,
      "step": 38
    },
    {
      "epoch": 0.02142857142857143,
      "grad_norm": 4.074350833892822,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.7577,
      "step": 39
    },
    {
      "epoch": 0.02197802197802198,
      "grad_norm": 3.547234535217285,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.808,
      "step": 40
    },
    {
      "epoch": 0.022527472527472527,
      "grad_norm": 4.03273344039917,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.7009,
      "step": 41
    },
    {
      "epoch": 0.023076923076923078,
      "grad_norm": 5.0143537521362305,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.9845,
      "step": 42
    },
    {
      "epoch": 0.023626373626373626,
      "grad_norm": 3.6202425956726074,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.8286,
      "step": 43
    },
    {
      "epoch": 0.024175824175824177,
      "grad_norm": 3.7478301525115967,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.7181,
      "step": 44
    },
    {
      "epoch": 0.024725274725274724,
      "grad_norm": 3.7263989448547363,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.6388,
      "step": 45
    },
    {
      "epoch": 0.025274725274725275,
      "grad_norm": 4.423315525054932,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.7229,
      "step": 46
    },
    {
      "epoch": 0.025824175824175823,
      "grad_norm": 3.4380674362182617,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.6488,
      "step": 47
    },
    {
      "epoch": 0.026373626373626374,
      "grad_norm": 3.32068133354187,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.6034,
      "step": 48
    },
    {
      "epoch": 0.026923076923076925,
      "grad_norm": 3.828432559967041,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.646,
      "step": 49
    },
    {
      "epoch": 0.027472527472527472,
      "grad_norm": 4.410627365112305,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.7029,
      "step": 50
    },
    {
      "epoch": 0.027472527472527472,
      "eval_loss": 0.8551605939865112,
      "eval_runtime": 248.255,
      "eval_samples_per_second": 12.35,
      "eval_steps_per_second": 3.09,
      "step": 50
    },
    {
      "epoch": 0.028021978021978023,
      "grad_norm": 4.3182268142700195,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.9381,
      "step": 51
    },
    {
      "epoch": 0.02857142857142857,
      "grad_norm": 3.6858444213867188,
      "learning_rate": 8.842005554284296e-05,
      "loss": 1.0042,
      "step": 52
    },
    {
      "epoch": 0.02912087912087912,
      "grad_norm": 3.4486730098724365,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.9807,
      "step": 53
    },
    {
      "epoch": 0.02967032967032967,
      "grad_norm": 4.263784885406494,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.7938,
      "step": 54
    },
    {
      "epoch": 0.03021978021978022,
      "grad_norm": 3.1360929012298584,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.7528,
      "step": 55
    },
    {
      "epoch": 0.03076923076923077,
      "grad_norm": 3.7347052097320557,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.8403,
      "step": 56
    },
    {
      "epoch": 0.03131868131868132,
      "grad_norm": 3.3182191848754883,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.8076,
      "step": 57
    },
    {
      "epoch": 0.031868131868131866,
      "grad_norm": 2.980205774307251,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.7868,
      "step": 58
    },
    {
      "epoch": 0.03241758241758242,
      "grad_norm": 3.3988683223724365,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.8072,
      "step": 59
    },
    {
      "epoch": 0.03296703296703297,
      "grad_norm": 3.52750301361084,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.8784,
      "step": 60
    },
    {
      "epoch": 0.033516483516483515,
      "grad_norm": 3.604893684387207,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.7565,
      "step": 61
    },
    {
      "epoch": 0.03406593406593406,
      "grad_norm": 3.4089083671569824,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.8974,
      "step": 62
    },
    {
      "epoch": 0.03461538461538462,
      "grad_norm": 4.178040504455566,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.868,
      "step": 63
    },
    {
      "epoch": 0.035164835164835165,
      "grad_norm": 3.4511518478393555,
      "learning_rate": 8.135881792367686e-05,
      "loss": 1.0132,
      "step": 64
    },
    {
      "epoch": 0.03571428571428571,
      "grad_norm": 3.3299479484558105,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.8356,
      "step": 65
    },
    {
      "epoch": 0.03626373626373627,
      "grad_norm": 3.3526644706726074,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.8513,
      "step": 66
    },
    {
      "epoch": 0.036813186813186814,
      "grad_norm": 3.367147922515869,
      "learning_rate": 7.938926261462366e-05,
      "loss": 1.0042,
      "step": 67
    },
    {
      "epoch": 0.03736263736263736,
      "grad_norm": 3.2283849716186523,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.8084,
      "step": 68
    },
    {
      "epoch": 0.03791208791208791,
      "grad_norm": 3.1080996990203857,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.9091,
      "step": 69
    },
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 3.440809488296509,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.912,
      "step": 70
    },
    {
      "epoch": 0.03901098901098901,
      "grad_norm": 4.543209552764893,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.9512,
      "step": 71
    },
    {
      "epoch": 0.03956043956043956,
      "grad_norm": 3.6167526245117188,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.7516,
      "step": 72
    },
    {
      "epoch": 0.04010989010989011,
      "grad_norm": 4.1947479248046875,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.9431,
      "step": 73
    },
    {
      "epoch": 0.04065934065934066,
      "grad_norm": 3.461369752883911,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.8908,
      "step": 74
    },
    {
      "epoch": 0.04120879120879121,
      "grad_norm": 3.3791346549987793,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.7087,
      "step": 75
    },
    {
      "epoch": 0.041758241758241756,
      "grad_norm": 3.541050434112549,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.7779,
      "step": 76
    },
    {
      "epoch": 0.04230769230769231,
      "grad_norm": 3.108454465866089,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.8149,
      "step": 77
    },
    {
      "epoch": 0.04285714285714286,
      "grad_norm": 3.772913932800293,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.7668,
      "step": 78
    },
    {
      "epoch": 0.043406593406593405,
      "grad_norm": 3.0966885089874268,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.8225,
      "step": 79
    },
    {
      "epoch": 0.04395604395604396,
      "grad_norm": 3.7791407108306885,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.8693,
      "step": 80
    },
    {
      "epoch": 0.04450549450549451,
      "grad_norm": 4.253763198852539,
      "learning_rate": 6.932495846462261e-05,
      "loss": 1.0137,
      "step": 81
    },
    {
      "epoch": 0.045054945054945054,
      "grad_norm": 3.970787525177002,
      "learning_rate": 6.855986244591104e-05,
      "loss": 1.0145,
      "step": 82
    },
    {
      "epoch": 0.0456043956043956,
      "grad_norm": 3.273193597793579,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.8402,
      "step": 83
    },
    {
      "epoch": 0.046153846153846156,
      "grad_norm": 3.246183156967163,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.8381,
      "step": 84
    },
    {
      "epoch": 0.046703296703296704,
      "grad_norm": 3.3997747898101807,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.9744,
      "step": 85
    },
    {
      "epoch": 0.04725274725274725,
      "grad_norm": 3.4663801193237305,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.9223,
      "step": 86
    },
    {
      "epoch": 0.0478021978021978,
      "grad_norm": 3.421869993209839,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.9882,
      "step": 87
    },
    {
      "epoch": 0.04835164835164835,
      "grad_norm": 2.5848114490509033,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.6518,
      "step": 88
    },
    {
      "epoch": 0.0489010989010989,
      "grad_norm": 4.04249382019043,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.8867,
      "step": 89
    },
    {
      "epoch": 0.04945054945054945,
      "grad_norm": 3.6130316257476807,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.8811,
      "step": 90
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.202869176864624,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.6812,
      "step": 91
    },
    {
      "epoch": 0.05054945054945055,
      "grad_norm": 5.957513332366943,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.8074,
      "step": 92
    },
    {
      "epoch": 0.0510989010989011,
      "grad_norm": 9.279909133911133,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.7514,
      "step": 93
    },
    {
      "epoch": 0.051648351648351645,
      "grad_norm": 3.816479206085205,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.7612,
      "step": 94
    },
    {
      "epoch": 0.0521978021978022,
      "grad_norm": 3.6703317165374756,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.6952,
      "step": 95
    },
    {
      "epoch": 0.05274725274725275,
      "grad_norm": 2.994513988494873,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.5601,
      "step": 96
    },
    {
      "epoch": 0.053296703296703295,
      "grad_norm": 3.6805593967437744,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.7776,
      "step": 97
    },
    {
      "epoch": 0.05384615384615385,
      "grad_norm": 3.7631995677948,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.6875,
      "step": 98
    },
    {
      "epoch": 0.0543956043956044,
      "grad_norm": 4.067095756530762,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.6078,
      "step": 99
    },
    {
      "epoch": 0.054945054945054944,
      "grad_norm": 3.9707226753234863,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.5355,
      "step": 100
    },
    {
      "epoch": 0.054945054945054944,
      "eval_loss": 0.810309112071991,
      "eval_runtime": 248.3207,
      "eval_samples_per_second": 12.347,
      "eval_steps_per_second": 3.089,
      "step": 100
    },
    {
      "epoch": 0.05549450549450549,
      "grad_norm": 2.7428088188171387,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.8487,
      "step": 101
    },
    {
      "epoch": 0.056043956043956046,
      "grad_norm": 4.557298183441162,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.7637,
      "step": 102
    },
    {
      "epoch": 0.056593406593406594,
      "grad_norm": 3.209634780883789,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.8463,
      "step": 103
    },
    {
      "epoch": 0.05714285714285714,
      "grad_norm": 5.7075276374816895,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.7269,
      "step": 104
    },
    {
      "epoch": 0.057692307692307696,
      "grad_norm": 3.590937376022339,
      "learning_rate": 5e-05,
      "loss": 0.9204,
      "step": 105
    },
    {
      "epoch": 0.05824175824175824,
      "grad_norm": 3.8173980712890625,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.9498,
      "step": 106
    },
    {
      "epoch": 0.05879120879120879,
      "grad_norm": 3.6682701110839844,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.846,
      "step": 107
    },
    {
      "epoch": 0.05934065934065934,
      "grad_norm": 3.0619313716888428,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.7405,
      "step": 108
    },
    {
      "epoch": 0.05989010989010989,
      "grad_norm": 3.3937807083129883,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.8557,
      "step": 109
    },
    {
      "epoch": 0.06043956043956044,
      "grad_norm": 3.5738933086395264,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.8423,
      "step": 110
    },
    {
      "epoch": 0.06098901098901099,
      "grad_norm": 3.2693288326263428,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.7375,
      "step": 111
    },
    {
      "epoch": 0.06153846153846154,
      "grad_norm": 3.017500638961792,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.7283,
      "step": 112
    },
    {
      "epoch": 0.06208791208791209,
      "grad_norm": 3.3882884979248047,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.949,
      "step": 113
    },
    {
      "epoch": 0.06263736263736264,
      "grad_norm": 3.357642650604248,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.9024,
      "step": 114
    },
    {
      "epoch": 0.06318681318681318,
      "grad_norm": 3.2038471698760986,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.7567,
      "step": 115
    },
    {
      "epoch": 0.06373626373626373,
      "grad_norm": 3.5930140018463135,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.847,
      "step": 116
    },
    {
      "epoch": 0.06428571428571428,
      "grad_norm": 3.030806303024292,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.817,
      "step": 117
    },
    {
      "epoch": 0.06483516483516484,
      "grad_norm": 3.528214693069458,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.8314,
      "step": 118
    },
    {
      "epoch": 0.06538461538461539,
      "grad_norm": 3.717010736465454,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.8658,
      "step": 119
    },
    {
      "epoch": 0.06593406593406594,
      "grad_norm": 3.4061574935913086,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.7038,
      "step": 120
    },
    {
      "epoch": 0.06648351648351648,
      "grad_norm": 2.5591681003570557,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.7427,
      "step": 121
    },
    {
      "epoch": 0.06703296703296703,
      "grad_norm": 3.146589756011963,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.724,
      "step": 122
    },
    {
      "epoch": 0.06758241758241758,
      "grad_norm": 3.002185344696045,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.6943,
      "step": 123
    },
    {
      "epoch": 0.06813186813186813,
      "grad_norm": 3.108757257461548,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.8639,
      "step": 124
    },
    {
      "epoch": 0.06868131868131869,
      "grad_norm": 3.310210704803467,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.9398,
      "step": 125
    },
    {
      "epoch": 0.06923076923076923,
      "grad_norm": 3.308823347091675,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.788,
      "step": 126
    },
    {
      "epoch": 0.06978021978021978,
      "grad_norm": 3.1850717067718506,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.7686,
      "step": 127
    },
    {
      "epoch": 0.07032967032967033,
      "grad_norm": 3.594505786895752,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.7049,
      "step": 128
    },
    {
      "epoch": 0.07087912087912088,
      "grad_norm": 3.527811288833618,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.9086,
      "step": 129
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 3.708664894104004,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.8532,
      "step": 130
    },
    {
      "epoch": 0.07197802197802197,
      "grad_norm": 3.2360305786132812,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.6813,
      "step": 131
    },
    {
      "epoch": 0.07252747252747253,
      "grad_norm": 4.264449596405029,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.958,
      "step": 132
    },
    {
      "epoch": 0.07307692307692308,
      "grad_norm": 2.919825315475464,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.7465,
      "step": 133
    },
    {
      "epoch": 0.07362637362637363,
      "grad_norm": 3.232802629470825,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.7196,
      "step": 134
    },
    {
      "epoch": 0.07417582417582418,
      "grad_norm": 3.242577314376831,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.71,
      "step": 135
    },
    {
      "epoch": 0.07472527472527472,
      "grad_norm": 3.7792561054229736,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.8448,
      "step": 136
    },
    {
      "epoch": 0.07527472527472527,
      "grad_norm": 3.8947784900665283,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.9169,
      "step": 137
    },
    {
      "epoch": 0.07582417582417582,
      "grad_norm": 3.439913749694824,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.7157,
      "step": 138
    },
    {
      "epoch": 0.07637362637362638,
      "grad_norm": 2.68253231048584,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.6558,
      "step": 139
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 4.011876583099365,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.9641,
      "step": 140
    },
    {
      "epoch": 0.07747252747252747,
      "grad_norm": 4.138200759887695,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.7938,
      "step": 141
    },
    {
      "epoch": 0.07802197802197802,
      "grad_norm": 4.425159454345703,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.9208,
      "step": 142
    },
    {
      "epoch": 0.07857142857142857,
      "grad_norm": 3.966238498687744,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.8659,
      "step": 143
    },
    {
      "epoch": 0.07912087912087912,
      "grad_norm": 4.205827236175537,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.8532,
      "step": 144
    },
    {
      "epoch": 0.07967032967032966,
      "grad_norm": 3.454686403274536,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.7116,
      "step": 145
    },
    {
      "epoch": 0.08021978021978023,
      "grad_norm": 3.6158885955810547,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.8404,
      "step": 146
    },
    {
      "epoch": 0.08076923076923077,
      "grad_norm": 4.816569805145264,
      "learning_rate": 1.800157297483417e-05,
      "loss": 1.0318,
      "step": 147
    },
    {
      "epoch": 0.08131868131868132,
      "grad_norm": 4.617947101593018,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.7545,
      "step": 148
    },
    {
      "epoch": 0.08186813186813187,
      "grad_norm": 2.717759132385254,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.5016,
      "step": 149
    },
    {
      "epoch": 0.08241758241758242,
      "grad_norm": 3.2810633182525635,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.6501,
      "step": 150
    },
    {
      "epoch": 0.08241758241758242,
      "eval_loss": 0.7657567262649536,
      "eval_runtime": 248.2901,
      "eval_samples_per_second": 12.348,
      "eval_steps_per_second": 3.089,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.20339070042112e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}