{ "best_metric": null, "best_model_checkpoint": null, "epoch": 0.4059701492537313, "eval_steps": 500, "global_step": 136, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0029850746268656717, "grad_norm": 8.253287036233855, "learning_rate": 9.090909090909091e-07, "loss": 2.2247, "step": 1 }, { "epoch": 0.005970149253731343, "grad_norm": 7.395570676912434, "learning_rate": 1.8181818181818183e-06, "loss": 2.1255, "step": 2 }, { "epoch": 0.008955223880597015, "grad_norm": 8.228093047465732, "learning_rate": 2.7272727272727272e-06, "loss": 2.1028, "step": 3 }, { "epoch": 0.011940298507462687, "grad_norm": 7.134217440402169, "learning_rate": 3.6363636363636366e-06, "loss": 2.0029, "step": 4 }, { "epoch": 0.014925373134328358, "grad_norm": 5.937798020763942, "learning_rate": 4.5454545454545455e-06, "loss": 1.907, "step": 5 }, { "epoch": 0.01791044776119403, "grad_norm": 5.559897081671283, "learning_rate": 5.4545454545454545e-06, "loss": 1.859, "step": 6 }, { "epoch": 0.020895522388059702, "grad_norm": 5.0306441137723485, "learning_rate": 6.363636363636364e-06, "loss": 1.8198, "step": 7 }, { "epoch": 0.023880597014925373, "grad_norm": 3.514372505025641, "learning_rate": 7.272727272727273e-06, "loss": 1.5112, "step": 8 }, { "epoch": 0.026865671641791045, "grad_norm": 3.1293116769998894, "learning_rate": 8.181818181818183e-06, "loss": 1.4412, "step": 9 }, { "epoch": 0.029850746268656716, "grad_norm": 2.7656396768471456, "learning_rate": 9.090909090909091e-06, "loss": 1.4058, "step": 10 }, { "epoch": 0.03283582089552239, "grad_norm": 5.352646410247892, "learning_rate": 1e-05, "loss": 1.4371, "step": 11 }, { "epoch": 0.03582089552238806, "grad_norm": 4.381930202952016, "learning_rate": 9.99976495753613e-06, "loss": 1.4176, "step": 12 }, { "epoch": 0.03880597014925373, "grad_norm": 3.1514253661202765, "learning_rate": 9.999059852242508e-06, "loss": 1.2973, "step": 13 }, { "epoch": 0.041791044776119404, "grad_norm": 2.6485142204402696, "learning_rate": 9.997884750411004e-06, "loss": 1.1784, "step": 14 }, { "epoch": 0.04477611940298507, "grad_norm": 2.7946518315041007, "learning_rate": 9.996239762521152e-06, "loss": 1.3108, "step": 15 }, { "epoch": 0.04776119402985075, "grad_norm": 2.6114018043960003, "learning_rate": 9.994125043229753e-06, "loss": 1.102, "step": 16 }, { "epoch": 0.050746268656716415, "grad_norm": 2.3027638181918495, "learning_rate": 9.991540791356342e-06, "loss": 1.0699, "step": 17 }, { "epoch": 0.05373134328358209, "grad_norm": 2.204185879725156, "learning_rate": 9.98848724986449e-06, "loss": 1.1473, "step": 18 }, { "epoch": 0.056716417910447764, "grad_norm": 2.2090761147091817, "learning_rate": 9.98496470583896e-06, "loss": 1.1816, "step": 19 }, { "epoch": 0.05970149253731343, "grad_norm": 2.0223479706782737, "learning_rate": 9.980973490458728e-06, "loss": 1.1217, "step": 20 }, { "epoch": 0.0626865671641791, "grad_norm": 2.080473927553376, "learning_rate": 9.976513978965829e-06, "loss": 1.0251, "step": 21 }, { "epoch": 0.06567164179104477, "grad_norm": 2.3488846800450744, "learning_rate": 9.971586590630094e-06, "loss": 1.0279, "step": 22 }, { "epoch": 0.06865671641791045, "grad_norm": 2.1532960090217212, "learning_rate": 9.966191788709716e-06, "loss": 1.0126, "step": 23 }, { "epoch": 0.07164179104477612, "grad_norm": 2.055129377186085, "learning_rate": 9.960330080407712e-06, "loss": 1.0072, "step": 24 }, { "epoch": 0.07462686567164178, "grad_norm": 2.156877426415811, "learning_rate": 9.954002016824226e-06, 
"loss": 1.0349, "step": 25 }, { "epoch": 0.07761194029850746, "grad_norm": 1.9914221696004062, "learning_rate": 9.947208192904722e-06, "loss": 1.0244, "step": 26 }, { "epoch": 0.08059701492537313, "grad_norm": 1.9887204088063186, "learning_rate": 9.939949247384046e-06, "loss": 0.9912, "step": 27 }, { "epoch": 0.08358208955223881, "grad_norm": 2.110799590941847, "learning_rate": 9.93222586272637e-06, "loss": 1.0377, "step": 28 }, { "epoch": 0.08656716417910448, "grad_norm": 2.0583073599588473, "learning_rate": 9.924038765061042e-06, "loss": 1.0139, "step": 29 }, { "epoch": 0.08955223880597014, "grad_norm": 2.1723065119138196, "learning_rate": 9.915388724114301e-06, "loss": 0.9376, "step": 30 }, { "epoch": 0.09253731343283582, "grad_norm": 2.064005981064233, "learning_rate": 9.906276553136924e-06, "loss": 1.0226, "step": 31 }, { "epoch": 0.0955223880597015, "grad_norm": 2.122144912833922, "learning_rate": 9.896703108827758e-06, "loss": 0.9483, "step": 32 }, { "epoch": 0.09850746268656717, "grad_norm": 2.1876975651489246, "learning_rate": 9.886669291253178e-06, "loss": 0.8942, "step": 33 }, { "epoch": 0.10149253731343283, "grad_norm": 1.9606368757720636, "learning_rate": 9.876176043762467e-06, "loss": 0.885, "step": 34 }, { "epoch": 0.1044776119402985, "grad_norm": 2.27926831852693, "learning_rate": 9.86522435289912e-06, "loss": 0.9491, "step": 35 }, { "epoch": 0.10746268656716418, "grad_norm": 2.030550748336965, "learning_rate": 9.853815248308101e-06, "loss": 0.982, "step": 36 }, { "epoch": 0.11044776119402985, "grad_norm": 2.0075365017841995, "learning_rate": 9.841949802639031e-06, "loss": 0.9843, "step": 37 }, { "epoch": 0.11343283582089553, "grad_norm": 2.459727689278767, "learning_rate": 9.829629131445342e-06, "loss": 0.8796, "step": 38 }, { "epoch": 0.11641791044776119, "grad_norm": 1.980360220017672, "learning_rate": 9.816854393079402e-06, "loss": 0.8793, "step": 39 }, { "epoch": 0.11940298507462686, "grad_norm": 2.0803429090292433, "learning_rate": 9.803626788583603e-06, "loss": 0.8749, "step": 40 }, { "epoch": 0.12238805970149254, "grad_norm": 2.0518013006773446, "learning_rate": 9.789947561577445e-06, "loss": 0.9123, "step": 41 }, { "epoch": 0.1253731343283582, "grad_norm": 2.0868313477992695, "learning_rate": 9.775817998140615e-06, "loss": 0.884, "step": 42 }, { "epoch": 0.12835820895522387, "grad_norm": 2.133757511141339, "learning_rate": 9.761239426692077e-06, "loss": 0.8846, "step": 43 }, { "epoch": 0.13134328358208955, "grad_norm": 2.5042273900185332, "learning_rate": 9.74621321786517e-06, "loss": 0.9755, "step": 44 }, { "epoch": 0.13432835820895522, "grad_norm": 1.9478417312433638, "learning_rate": 9.730740784378755e-06, "loss": 0.8857, "step": 45 }, { "epoch": 0.1373134328358209, "grad_norm": 1.9676701142325428, "learning_rate": 9.71482358090438e-06, "loss": 0.8642, "step": 46 }, { "epoch": 0.14029850746268657, "grad_norm": 1.920433325570768, "learning_rate": 9.698463103929542e-06, "loss": 0.8977, "step": 47 }, { "epoch": 0.14328358208955225, "grad_norm": 1.9460499143315086, "learning_rate": 9.681660891616967e-06, "loss": 0.902, "step": 48 }, { "epoch": 0.14626865671641792, "grad_norm": 1.968155074678708, "learning_rate": 9.664418523660004e-06, "loss": 0.9046, "step": 49 }, { "epoch": 0.14925373134328357, "grad_norm": 1.9490820429685933, "learning_rate": 9.646737621134112e-06, "loss": 0.943, "step": 50 }, { "epoch": 0.15223880597014924, "grad_norm": 2.08429080101051, "learning_rate": 9.628619846344453e-06, "loss": 0.9337, "step": 51 }, { "epoch": 0.15522388059701492, 
"grad_norm": 1.930901797857114, "learning_rate": 9.610066902669593e-06, "loss": 0.8928, "step": 52 }, { "epoch": 0.1582089552238806, "grad_norm": 1.9013217162350948, "learning_rate": 9.591080534401371e-06, "loss": 0.8905, "step": 53 }, { "epoch": 0.16119402985074627, "grad_norm": 1.9982220077949842, "learning_rate": 9.571662526580898e-06, "loss": 0.8843, "step": 54 }, { "epoch": 0.16417910447761194, "grad_norm": 1.9851369521318882, "learning_rate": 9.551814704830734e-06, "loss": 0.935, "step": 55 }, { "epoch": 0.16716417910447762, "grad_norm": 2.1245353835937437, "learning_rate": 9.531538935183252e-06, "loss": 0.8666, "step": 56 }, { "epoch": 0.1701492537313433, "grad_norm": 2.0723319083497245, "learning_rate": 9.51083712390519e-06, "loss": 0.9155, "step": 57 }, { "epoch": 0.17313432835820897, "grad_norm": 1.7064471958774803, "learning_rate": 9.48971121731844e-06, "loss": 0.7976, "step": 58 }, { "epoch": 0.1761194029850746, "grad_norm": 2.205553839755198, "learning_rate": 9.468163201617063e-06, "loss": 0.8575, "step": 59 }, { "epoch": 0.1791044776119403, "grad_norm": 2.123341193411675, "learning_rate": 9.446195102680531e-06, "loss": 0.8779, "step": 60 }, { "epoch": 0.18208955223880596, "grad_norm": 2.019140032242156, "learning_rate": 9.423808985883289e-06, "loss": 0.8087, "step": 61 }, { "epoch": 0.18507462686567164, "grad_norm": 1.9693330173987202, "learning_rate": 9.401006955900555e-06, "loss": 0.8838, "step": 62 }, { "epoch": 0.1880597014925373, "grad_norm": 1.7933631080982435, "learning_rate": 9.377791156510456e-06, "loss": 0.8881, "step": 63 }, { "epoch": 0.191044776119403, "grad_norm": 2.064611639976522, "learning_rate": 9.35416377039246e-06, "loss": 0.9213, "step": 64 }, { "epoch": 0.19402985074626866, "grad_norm": 2.108631144160596, "learning_rate": 9.330127018922195e-06, "loss": 0.8718, "step": 65 }, { "epoch": 0.19701492537313434, "grad_norm": 2.0324645592765624, "learning_rate": 9.305683161962569e-06, "loss": 0.9008, "step": 66 }, { "epoch": 0.2, "grad_norm": 2.433917861345567, "learning_rate": 9.280834497651334e-06, "loss": 0.8843, "step": 67 }, { "epoch": 0.20298507462686566, "grad_norm": 2.1425405132153377, "learning_rate": 9.255583362184998e-06, "loss": 0.91, "step": 68 }, { "epoch": 0.20597014925373133, "grad_norm": 2.1530253028764794, "learning_rate": 9.229932129599206e-06, "loss": 0.9011, "step": 69 }, { "epoch": 0.208955223880597, "grad_norm": 1.883401707305491, "learning_rate": 9.203883211545517e-06, "loss": 0.8988, "step": 70 }, { "epoch": 0.21194029850746268, "grad_norm": 1.9557201234597423, "learning_rate": 9.177439057064684e-06, "loss": 0.8674, "step": 71 }, { "epoch": 0.21492537313432836, "grad_norm": 1.9754846823177397, "learning_rate": 9.150602152356394e-06, "loss": 0.8917, "step": 72 }, { "epoch": 0.21791044776119403, "grad_norm": 2.1092131375976333, "learning_rate": 9.123375020545534e-06, "loss": 0.9798, "step": 73 }, { "epoch": 0.2208955223880597, "grad_norm": 1.9481297510729838, "learning_rate": 9.09576022144496e-06, "loss": 0.8525, "step": 74 }, { "epoch": 0.22388059701492538, "grad_norm": 2.1761431025018845, "learning_rate": 9.067760351314838e-06, "loss": 0.8726, "step": 75 }, { "epoch": 0.22686567164179106, "grad_norm": 2.2204565282035276, "learning_rate": 9.039378042618556e-06, "loss": 1.0044, "step": 76 }, { "epoch": 0.2298507462686567, "grad_norm": 1.9277579355394385, "learning_rate": 9.01061596377522e-06, "loss": 0.8209, "step": 77 }, { "epoch": 0.23283582089552238, "grad_norm": 2.0131002774833076, "learning_rate": 8.981476818908778e-06, "loss": 
0.9414, "step": 78 }, { "epoch": 0.23582089552238805, "grad_norm": 1.8291261816939248, "learning_rate": 8.951963347593797e-06, "loss": 0.832, "step": 79 }, { "epoch": 0.23880597014925373, "grad_norm": 2.174063739192292, "learning_rate": 8.92207832459788e-06, "loss": 0.9025, "step": 80 }, { "epoch": 0.2417910447761194, "grad_norm": 2.1708714478771687, "learning_rate": 8.891824559620801e-06, "loss": 0.8268, "step": 81 }, { "epoch": 0.24477611940298508, "grad_norm": 2.027241742869845, "learning_rate": 8.861204897030346e-06, "loss": 0.7233, "step": 82 }, { "epoch": 0.24776119402985075, "grad_norm": 1.9258937377265744, "learning_rate": 8.83022221559489e-06, "loss": 0.7375, "step": 83 }, { "epoch": 0.2507462686567164, "grad_norm": 1.9152686230971898, "learning_rate": 8.798879428212748e-06, "loss": 0.8811, "step": 84 }, { "epoch": 0.2537313432835821, "grad_norm": 1.7807746386758827, "learning_rate": 8.767179481638303e-06, "loss": 0.8278, "step": 85 }, { "epoch": 0.25671641791044775, "grad_norm": 1.8289188389465045, "learning_rate": 8.735125356204982e-06, "loss": 0.8347, "step": 86 }, { "epoch": 0.25970149253731345, "grad_norm": 2.245431341266184, "learning_rate": 8.702720065545024e-06, "loss": 0.7732, "step": 87 }, { "epoch": 0.2626865671641791, "grad_norm": 1.958611723882418, "learning_rate": 8.669966656306176e-06, "loss": 0.7941, "step": 88 }, { "epoch": 0.2656716417910448, "grad_norm": 1.9764966388329304, "learning_rate": 8.636868207865244e-06, "loss": 0.8525, "step": 89 }, { "epoch": 0.26865671641791045, "grad_norm": 2.1735209259103483, "learning_rate": 8.603427832038574e-06, "loss": 0.8746, "step": 90 }, { "epoch": 0.2716417910447761, "grad_norm": 2.0038856668883813, "learning_rate": 8.569648672789496e-06, "loss": 0.8601, "step": 91 }, { "epoch": 0.2746268656716418, "grad_norm": 2.055939688066048, "learning_rate": 8.535533905932739e-06, "loss": 0.85, "step": 92 }, { "epoch": 0.27761194029850744, "grad_norm": 1.9848488069313877, "learning_rate": 8.501086738835843e-06, "loss": 0.8632, "step": 93 }, { "epoch": 0.28059701492537314, "grad_norm": 2.113603493312776, "learning_rate": 8.466310410117622e-06, "loss": 0.8752, "step": 94 }, { "epoch": 0.2835820895522388, "grad_norm": 2.1479673113866333, "learning_rate": 8.43120818934367e-06, "loss": 0.8588, "step": 95 }, { "epoch": 0.2865671641791045, "grad_norm": 2.053881585055401, "learning_rate": 8.395783376718967e-06, "loss": 0.8122, "step": 96 }, { "epoch": 0.28955223880597014, "grad_norm": 1.9411012651755073, "learning_rate": 8.360039302777614e-06, "loss": 0.8059, "step": 97 }, { "epoch": 0.29253731343283584, "grad_norm": 1.8508350536791303, "learning_rate": 8.323979328069689e-06, "loss": 0.833, "step": 98 }, { "epoch": 0.2955223880597015, "grad_norm": 1.8935253815844801, "learning_rate": 8.28760684284532e-06, "loss": 0.8401, "step": 99 }, { "epoch": 0.29850746268656714, "grad_norm": 2.2781390311354577, "learning_rate": 8.25092526673592e-06, "loss": 0.909, "step": 100 }, { "epoch": 0.30149253731343284, "grad_norm": 2.0985691350237876, "learning_rate": 8.213938048432697e-06, "loss": 0.8366, "step": 101 }, { "epoch": 0.3044776119402985, "grad_norm": 2.0596354416831835, "learning_rate": 8.176648665362426e-06, "loss": 0.7292, "step": 102 }, { "epoch": 0.3074626865671642, "grad_norm": 1.9898953284718084, "learning_rate": 8.139060623360494e-06, "loss": 0.8331, "step": 103 }, { "epoch": 0.31044776119402984, "grad_norm": 2.1541238474940476, "learning_rate": 8.101177456341301e-06, "loss": 0.8119, "step": 104 }, { "epoch": 0.31343283582089554, "grad_norm": 
1.945555185400502, "learning_rate": 8.063002725966014e-06, "loss": 0.817, "step": 105 }, { "epoch": 0.3164179104477612, "grad_norm": 1.7990902555417547, "learning_rate": 8.024540021307709e-06, "loss": 0.7763, "step": 106 }, { "epoch": 0.3194029850746269, "grad_norm": 1.9977298309774547, "learning_rate": 7.985792958513932e-06, "loss": 0.857, "step": 107 }, { "epoch": 0.32238805970149254, "grad_norm": 1.8852500490644626, "learning_rate": 7.946765180466725e-06, "loss": 0.7729, "step": 108 }, { "epoch": 0.3253731343283582, "grad_norm": 2.076072697679506, "learning_rate": 7.907460356440133e-06, "loss": 0.8789, "step": 109 }, { "epoch": 0.3283582089552239, "grad_norm": 1.9583549363047221, "learning_rate": 7.86788218175523e-06, "loss": 0.824, "step": 110 }, { "epoch": 0.33134328358208953, "grad_norm": 1.8457221620577013, "learning_rate": 7.828034377432694e-06, "loss": 0.8298, "step": 111 }, { "epoch": 0.33432835820895523, "grad_norm": 1.9836364032355813, "learning_rate": 7.787920689842965e-06, "loss": 0.8754, "step": 112 }, { "epoch": 0.3373134328358209, "grad_norm": 1.9566209100212486, "learning_rate": 7.747544890354031e-06, "loss": 0.8364, "step": 113 }, { "epoch": 0.3402985074626866, "grad_norm": 1.784255320047874, "learning_rate": 7.706910774976849e-06, "loss": 0.7379, "step": 114 }, { "epoch": 0.34328358208955223, "grad_norm": 2.028920304841314, "learning_rate": 7.666022164008458e-06, "loss": 0.8175, "step": 115 }, { "epoch": 0.34626865671641793, "grad_norm": 1.8633625964435614, "learning_rate": 7.624882901672801e-06, "loss": 0.8038, "step": 116 }, { "epoch": 0.3492537313432836, "grad_norm": 2.0316374775972723, "learning_rate": 7.5834968557593155e-06, "loss": 0.8876, "step": 117 }, { "epoch": 0.3522388059701492, "grad_norm": 1.9425263272714322, "learning_rate": 7.541867917259278e-06, "loss": 0.9399, "step": 118 }, { "epoch": 0.35522388059701493, "grad_norm": 1.9570130825151233, "learning_rate": 7.500000000000001e-06, "loss": 0.7776, "step": 119 }, { "epoch": 0.3582089552238806, "grad_norm": 1.9649795879633638, "learning_rate": 7.457897040276853e-06, "loss": 0.8869, "step": 120 }, { "epoch": 0.3611940298507463, "grad_norm": 1.8568802293506739, "learning_rate": 7.415562996483193e-06, "loss": 0.8187, "step": 121 }, { "epoch": 0.3641791044776119, "grad_norm": 1.978910446313143, "learning_rate": 7.373001848738203e-06, "loss": 0.9066, "step": 122 }, { "epoch": 0.36716417910447763, "grad_norm": 1.8873874119153988, "learning_rate": 7.330217598512696e-06, "loss": 0.7271, "step": 123 }, { "epoch": 0.3701492537313433, "grad_norm": 1.888623084115613, "learning_rate": 7.2872142682529045e-06, "loss": 0.7513, "step": 124 }, { "epoch": 0.373134328358209, "grad_norm": 1.8720026536315466, "learning_rate": 7.243995901002312e-06, "loss": 0.8024, "step": 125 }, { "epoch": 0.3761194029850746, "grad_norm": 1.8390581271914446, "learning_rate": 7.200566560021525e-06, "loss": 0.7723, "step": 126 }, { "epoch": 0.37910447761194027, "grad_norm": 2.0123305936558484, "learning_rate": 7.156930328406268e-06, "loss": 0.829, "step": 127 }, { "epoch": 0.382089552238806, "grad_norm": 2.0041614392292373, "learning_rate": 7.113091308703498e-06, "loss": 0.8019, "step": 128 }, { "epoch": 0.3850746268656716, "grad_norm": 2.270729693984615, "learning_rate": 7.069053622525697e-06, "loss": 0.9292, "step": 129 }, { "epoch": 0.3880597014925373, "grad_norm": 1.9441832516476927, "learning_rate": 7.0248214101633685e-06, "loss": 0.7743, "step": 130 }, { "epoch": 0.39104477611940297, "grad_norm": 2.009374270876549, "learning_rate": 
6.980398830195785e-06, "loss": 0.9518, "step": 131 }, { "epoch": 0.3940298507462687, "grad_norm": 2.005999418159697, "learning_rate": 6.9357900591000034e-06, "loss": 0.8366, "step": 132 }, { "epoch": 0.3970149253731343, "grad_norm": 2.169897702526253, "learning_rate": 6.890999290858213e-06, "loss": 0.9083, "step": 133 }, { "epoch": 0.4, "grad_norm": 1.949356318876888, "learning_rate": 6.8460307365634225e-06, "loss": 0.776, "step": 134 }, { "epoch": 0.40298507462686567, "grad_norm": 1.9220482972951043, "learning_rate": 6.800888624023552e-06, "loss": 0.8621, "step": 135 }, { "epoch": 0.4059701492537313, "grad_norm": 2.0267035147753294, "learning_rate": 6.755577197363945e-06, "loss": 0.7289, "step": 136 } ], "logging_steps": 1.0, "max_steps": 335, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 17, "total_flos": 11395313950720.0, "train_batch_size": 1, "trial_name": null, "trial_params": null }