diff --git "a/checkpoint-1500/trainer_state.json" "b/checkpoint-1500/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-1500/trainer_state.json" @@ -0,0 +1,10534 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.6222130042517888, + "eval_steps": 500, + "global_step": 1500, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.00041480866950119257, + "grad_norm": 5.84375, + "learning_rate": 0.0, + "loss": 5.3678, + "step": 1 + }, + { + "epoch": 0.0008296173390023851, + "grad_norm": 5.15625, + "learning_rate": 4e-05, + "loss": 5.3815, + "step": 2 + }, + { + "epoch": 0.0012444260085035777, + "grad_norm": 8.0625, + "learning_rate": 8e-05, + "loss": 5.578, + "step": 3 + }, + { + "epoch": 0.0016592346780047703, + "grad_norm": 5.15625, + "learning_rate": 0.00012, + "loss": 5.1489, + "step": 4 + }, + { + "epoch": 0.002074043347505963, + "grad_norm": 4.96875, + "learning_rate": 0.00016, + "loss": 5.1569, + "step": 5 + }, + { + "epoch": 0.0024888520170071554, + "grad_norm": 4.3125, + "learning_rate": 0.0002, + "loss": 5.2508, + "step": 6 + }, + { + "epoch": 0.002903660686508348, + "grad_norm": 3.828125, + "learning_rate": 0.00019997998999499752, + "loss": 5.2444, + "step": 7 + }, + { + "epoch": 0.0033184693560095405, + "grad_norm": 3.453125, + "learning_rate": 0.000199959979989995, + "loss": 5.1866, + "step": 8 + }, + { + "epoch": 0.0037332780255107333, + "grad_norm": 4.5625, + "learning_rate": 0.00019993996998499253, + "loss": 4.7983, + "step": 9 + }, + { + "epoch": 0.004148086695011926, + "grad_norm": 4.0, + "learning_rate": 0.00019991995997999, + "loss": 5.2917, + "step": 10 + }, + { + "epoch": 0.0045628953645131184, + "grad_norm": 3.90625, + "learning_rate": 0.0001998999499749875, + "loss": 5.1454, + "step": 11 + }, + { + "epoch": 0.004977704034014311, + "grad_norm": 3.65625, + "learning_rate": 0.000199879939969985, + "loss": 5.2539, + "step": 12 + }, + { + "epoch": 0.005392512703515503, + "grad_norm": 3.671875, + "learning_rate": 0.0001998599299649825, + "loss": 5.2084, + "step": 13 + }, + { + "epoch": 0.005807321373016696, + "grad_norm": 3.65625, + "learning_rate": 0.00019983991995998, + "loss": 5.28, + "step": 14 + }, + { + "epoch": 0.006222130042517889, + "grad_norm": 3.359375, + "learning_rate": 0.0001998199099549775, + "loss": 5.2941, + "step": 15 + }, + { + "epoch": 0.006636938712019081, + "grad_norm": 3.328125, + "learning_rate": 0.000199799899949975, + "loss": 5.0754, + "step": 16 + }, + { + "epoch": 0.007051747381520273, + "grad_norm": 3.953125, + "learning_rate": 0.0001997798899449725, + "loss": 5.0806, + "step": 17 + }, + { + "epoch": 0.007466556051021467, + "grad_norm": 3.59375, + "learning_rate": 0.00019975987993997, + "loss": 5.303, + "step": 18 + }, + { + "epoch": 0.007881364720522659, + "grad_norm": 3.234375, + "learning_rate": 0.0001997398699349675, + "loss": 5.0535, + "step": 19 + }, + { + "epoch": 0.008296173390023852, + "grad_norm": 3.046875, + "learning_rate": 0.000199719859929965, + "loss": 5.0371, + "step": 20 + }, + { + "epoch": 0.008710982059525044, + "grad_norm": 2.90625, + "learning_rate": 0.00019969984992496248, + "loss": 4.9977, + "step": 21 + }, + { + "epoch": 0.009125790729026237, + "grad_norm": 3.0, + "learning_rate": 0.00019967983991996, + "loss": 5.3496, + "step": 22 + }, + { + "epoch": 0.009540599398527428, + "grad_norm": 2.5, + "learning_rate": 0.00019965982991495748, + "loss": 4.8964, + "step": 23 + }, + 
{ + "epoch": 0.009955408068028622, + "grad_norm": 3.15625, + "learning_rate": 0.000199639819909955, + "loss": 5.0368, + "step": 24 + }, + { + "epoch": 0.010370216737529815, + "grad_norm": 2.875, + "learning_rate": 0.00019961980990495248, + "loss": 5.5296, + "step": 25 + }, + { + "epoch": 0.010785025407031006, + "grad_norm": 2.46875, + "learning_rate": 0.00019959979989995, + "loss": 5.1468, + "step": 26 + }, + { + "epoch": 0.0111998340765322, + "grad_norm": 3.09375, + "learning_rate": 0.00019957978989494749, + "loss": 5.0455, + "step": 27 + }, + { + "epoch": 0.011614642746033393, + "grad_norm": 3.09375, + "learning_rate": 0.00019955977988994497, + "loss": 5.1206, + "step": 28 + }, + { + "epoch": 0.012029451415534584, + "grad_norm": 3.03125, + "learning_rate": 0.00019953976988494246, + "loss": 5.0164, + "step": 29 + }, + { + "epoch": 0.012444260085035777, + "grad_norm": 2.765625, + "learning_rate": 0.00019951975987993998, + "loss": 5.0402, + "step": 30 + }, + { + "epoch": 0.01285906875453697, + "grad_norm": 2.5625, + "learning_rate": 0.00019949974987493746, + "loss": 5.1637, + "step": 31 + }, + { + "epoch": 0.013273877424038162, + "grad_norm": 3.078125, + "learning_rate": 0.00019947973986993498, + "loss": 5.1842, + "step": 32 + }, + { + "epoch": 0.013688686093539355, + "grad_norm": 2.40625, + "learning_rate": 0.00019945972986493246, + "loss": 4.8599, + "step": 33 + }, + { + "epoch": 0.014103494763040547, + "grad_norm": 2.546875, + "learning_rate": 0.00019943971985992998, + "loss": 5.1072, + "step": 34 + }, + { + "epoch": 0.01451830343254174, + "grad_norm": 2.359375, + "learning_rate": 0.0001994197098549275, + "loss": 5.1639, + "step": 35 + }, + { + "epoch": 0.014933112102042933, + "grad_norm": 2.375, + "learning_rate": 0.00019939969984992498, + "loss": 5.0431, + "step": 36 + }, + { + "epoch": 0.015347920771544125, + "grad_norm": 3.140625, + "learning_rate": 0.00019937968984492247, + "loss": 5.0079, + "step": 37 + }, + { + "epoch": 0.015762729441045318, + "grad_norm": 2.421875, + "learning_rate": 0.00019935967983991995, + "loss": 5.1109, + "step": 38 + }, + { + "epoch": 0.01617753811054651, + "grad_norm": 2.390625, + "learning_rate": 0.00019933966983491747, + "loss": 5.0062, + "step": 39 + }, + { + "epoch": 0.016592346780047704, + "grad_norm": 2.453125, + "learning_rate": 0.00019931965982991496, + "loss": 5.1522, + "step": 40 + }, + { + "epoch": 0.017007155449548894, + "grad_norm": 2.578125, + "learning_rate": 0.00019929964982491247, + "loss": 4.9839, + "step": 41 + }, + { + "epoch": 0.017421964119050087, + "grad_norm": 2.25, + "learning_rate": 0.00019927963981990996, + "loss": 5.0712, + "step": 42 + }, + { + "epoch": 0.01783677278855128, + "grad_norm": 2.65625, + "learning_rate": 0.00019925962981490747, + "loss": 5.2342, + "step": 43 + }, + { + "epoch": 0.018251581458052474, + "grad_norm": 2.46875, + "learning_rate": 0.00019923961980990496, + "loss": 5.0108, + "step": 44 + }, + { + "epoch": 0.018666390127553667, + "grad_norm": 2.671875, + "learning_rate": 0.00019921960980490247, + "loss": 5.1698, + "step": 45 + }, + { + "epoch": 0.019081198797054857, + "grad_norm": 3.296875, + "learning_rate": 0.00019919959979989996, + "loss": 5.1331, + "step": 46 + }, + { + "epoch": 0.01949600746655605, + "grad_norm": 3.203125, + "learning_rate": 0.00019917958979489745, + "loss": 5.1377, + "step": 47 + }, + { + "epoch": 0.019910816136057243, + "grad_norm": 2.59375, + "learning_rate": 0.00019915957978989496, + "loss": 4.8147, + "step": 48 + }, + { + "epoch": 0.020325624805558436, + "grad_norm": 4.0, + 
"learning_rate": 0.00019913956978489245, + "loss": 4.7886, + "step": 49 + }, + { + "epoch": 0.02074043347505963, + "grad_norm": 2.796875, + "learning_rate": 0.00019911955977988996, + "loss": 5.0484, + "step": 50 + }, + { + "epoch": 0.021155242144560823, + "grad_norm": 3.078125, + "learning_rate": 0.00019909954977488745, + "loss": 5.22, + "step": 51 + }, + { + "epoch": 0.021570050814062013, + "grad_norm": 2.359375, + "learning_rate": 0.00019907953976988497, + "loss": 5.0641, + "step": 52 + }, + { + "epoch": 0.021984859483563206, + "grad_norm": 2.71875, + "learning_rate": 0.00019905952976488245, + "loss": 4.9759, + "step": 53 + }, + { + "epoch": 0.0223996681530644, + "grad_norm": 2.640625, + "learning_rate": 0.00019903951975987997, + "loss": 5.0263, + "step": 54 + }, + { + "epoch": 0.022814476822565592, + "grad_norm": 3.171875, + "learning_rate": 0.00019901950975487746, + "loss": 5.1059, + "step": 55 + }, + { + "epoch": 0.023229285492066785, + "grad_norm": 2.875, + "learning_rate": 0.00019899949974987494, + "loss": 5.0911, + "step": 56 + }, + { + "epoch": 0.023644094161567975, + "grad_norm": 3.390625, + "learning_rate": 0.00019897948974487243, + "loss": 4.9596, + "step": 57 + }, + { + "epoch": 0.02405890283106917, + "grad_norm": 2.4375, + "learning_rate": 0.00019895947973986994, + "loss": 4.9327, + "step": 58 + }, + { + "epoch": 0.02447371150057036, + "grad_norm": 2.421875, + "learning_rate": 0.00019893946973486743, + "loss": 5.2353, + "step": 59 + }, + { + "epoch": 0.024888520170071555, + "grad_norm": 2.609375, + "learning_rate": 0.00019891945972986495, + "loss": 4.7571, + "step": 60 + }, + { + "epoch": 0.025303328839572748, + "grad_norm": 2.75, + "learning_rate": 0.00019889944972486243, + "loss": 5.0165, + "step": 61 + }, + { + "epoch": 0.02571813750907394, + "grad_norm": 2.421875, + "learning_rate": 0.00019887943971985995, + "loss": 5.0253, + "step": 62 + }, + { + "epoch": 0.02613294617857513, + "grad_norm": 2.640625, + "learning_rate": 0.00019885942971485744, + "loss": 5.0432, + "step": 63 + }, + { + "epoch": 0.026547754848076324, + "grad_norm": 2.640625, + "learning_rate": 0.00019883941970985492, + "loss": 5.2195, + "step": 64 + }, + { + "epoch": 0.026962563517577517, + "grad_norm": 2.265625, + "learning_rate": 0.00019881940970485244, + "loss": 5.157, + "step": 65 + }, + { + "epoch": 0.02737737218707871, + "grad_norm": 2.4375, + "learning_rate": 0.00019879939969984992, + "loss": 5.0014, + "step": 66 + }, + { + "epoch": 0.027792180856579904, + "grad_norm": 2.421875, + "learning_rate": 0.00019877938969484744, + "loss": 5.05, + "step": 67 + }, + { + "epoch": 0.028206989526081094, + "grad_norm": 2.71875, + "learning_rate": 0.00019875937968984493, + "loss": 4.9888, + "step": 68 + }, + { + "epoch": 0.028621798195582287, + "grad_norm": 2.71875, + "learning_rate": 0.00019873936968484244, + "loss": 4.7699, + "step": 69 + }, + { + "epoch": 0.02903660686508348, + "grad_norm": 2.328125, + "learning_rate": 0.00019871935967983993, + "loss": 4.6892, + "step": 70 + }, + { + "epoch": 0.029451415534584673, + "grad_norm": 2.875, + "learning_rate": 0.00019869934967483744, + "loss": 4.8417, + "step": 71 + }, + { + "epoch": 0.029866224204085867, + "grad_norm": 2.65625, + "learning_rate": 0.00019867933966983493, + "loss": 4.9361, + "step": 72 + }, + { + "epoch": 0.030281032873587056, + "grad_norm": 3.21875, + "learning_rate": 0.00019865932966483242, + "loss": 4.86, + "step": 73 + }, + { + "epoch": 0.03069584154308825, + "grad_norm": 2.671875, + "learning_rate": 0.0001986393196598299, + "loss": 5.232, + 
"step": 74 + }, + { + "epoch": 0.031110650212589443, + "grad_norm": 2.578125, + "learning_rate": 0.00019861930965482742, + "loss": 5.0538, + "step": 75 + }, + { + "epoch": 0.031525458882090636, + "grad_norm": 2.828125, + "learning_rate": 0.00019859929964982493, + "loss": 4.9119, + "step": 76 + }, + { + "epoch": 0.03194026755159183, + "grad_norm": 2.5, + "learning_rate": 0.00019857928964482242, + "loss": 4.9387, + "step": 77 + }, + { + "epoch": 0.03235507622109302, + "grad_norm": 2.78125, + "learning_rate": 0.00019855927963981993, + "loss": 5.0123, + "step": 78 + }, + { + "epoch": 0.032769884890594216, + "grad_norm": 3.65625, + "learning_rate": 0.00019853926963481742, + "loss": 4.8198, + "step": 79 + }, + { + "epoch": 0.03318469356009541, + "grad_norm": 2.84375, + "learning_rate": 0.00019851925962981494, + "loss": 5.1624, + "step": 80 + }, + { + "epoch": 0.0335995022295966, + "grad_norm": 2.765625, + "learning_rate": 0.00019849924962481242, + "loss": 4.7054, + "step": 81 + }, + { + "epoch": 0.03401431089909779, + "grad_norm": 3.1875, + "learning_rate": 0.0001984792396198099, + "loss": 4.8135, + "step": 82 + }, + { + "epoch": 0.03442911956859898, + "grad_norm": 2.921875, + "learning_rate": 0.0001984592296148074, + "loss": 5.0179, + "step": 83 + }, + { + "epoch": 0.034843928238100175, + "grad_norm": 2.859375, + "learning_rate": 0.0001984392196098049, + "loss": 4.8094, + "step": 84 + }, + { + "epoch": 0.03525873690760137, + "grad_norm": 2.96875, + "learning_rate": 0.0001984192096048024, + "loss": 4.8648, + "step": 85 + }, + { + "epoch": 0.03567354557710256, + "grad_norm": 2.484375, + "learning_rate": 0.0001983991995997999, + "loss": 4.8648, + "step": 86 + }, + { + "epoch": 0.036088354246603754, + "grad_norm": 2.484375, + "learning_rate": 0.0001983791895947974, + "loss": 5.2012, + "step": 87 + }, + { + "epoch": 0.03650316291610495, + "grad_norm": 2.625, + "learning_rate": 0.00019835917958979492, + "loss": 5.0743, + "step": 88 + }, + { + "epoch": 0.03691797158560614, + "grad_norm": 2.4375, + "learning_rate": 0.0001983391695847924, + "loss": 4.9473, + "step": 89 + }, + { + "epoch": 0.037332780255107334, + "grad_norm": 2.546875, + "learning_rate": 0.00019831915957978992, + "loss": 4.8153, + "step": 90 + }, + { + "epoch": 0.03774758892460853, + "grad_norm": 2.5625, + "learning_rate": 0.0001982991495747874, + "loss": 4.9615, + "step": 91 + }, + { + "epoch": 0.038162397594109713, + "grad_norm": 2.359375, + "learning_rate": 0.0001982791395697849, + "loss": 4.7808, + "step": 92 + }, + { + "epoch": 0.03857720626361091, + "grad_norm": 3.578125, + "learning_rate": 0.0001982591295647824, + "loss": 4.7397, + "step": 93 + }, + { + "epoch": 0.0389920149331121, + "grad_norm": 3.171875, + "learning_rate": 0.0001982391195597799, + "loss": 4.7318, + "step": 94 + }, + { + "epoch": 0.03940682360261329, + "grad_norm": 2.71875, + "learning_rate": 0.0001982191095547774, + "loss": 4.7505, + "step": 95 + }, + { + "epoch": 0.039821632272114486, + "grad_norm": 2.84375, + "learning_rate": 0.0001981990995497749, + "loss": 4.9952, + "step": 96 + }, + { + "epoch": 0.04023644094161568, + "grad_norm": 3.390625, + "learning_rate": 0.0001981790895447724, + "loss": 4.7346, + "step": 97 + }, + { + "epoch": 0.04065124961111687, + "grad_norm": 2.828125, + "learning_rate": 0.0001981590795397699, + "loss": 4.9629, + "step": 98 + }, + { + "epoch": 0.041066058280618066, + "grad_norm": 2.5625, + "learning_rate": 0.00019813906953476738, + "loss": 4.9172, + "step": 99 + }, + { + "epoch": 0.04148086695011926, + "grad_norm": 3.09375, + 
"learning_rate": 0.00019811905952976487, + "loss": 4.7065, + "step": 100 + }, + { + "epoch": 0.04189567561962045, + "grad_norm": 3.25, + "learning_rate": 0.00019809904952476239, + "loss": 4.8555, + "step": 101 + }, + { + "epoch": 0.042310484289121646, + "grad_norm": 3.171875, + "learning_rate": 0.00019807903951975987, + "loss": 5.1348, + "step": 102 + }, + { + "epoch": 0.04272529295862283, + "grad_norm": 2.734375, + "learning_rate": 0.0001980590295147574, + "loss": 4.6186, + "step": 103 + }, + { + "epoch": 0.043140101628124025, + "grad_norm": 2.75, + "learning_rate": 0.0001980390195097549, + "loss": 4.9732, + "step": 104 + }, + { + "epoch": 0.04355491029762522, + "grad_norm": 3.1875, + "learning_rate": 0.0001980190095047524, + "loss": 4.6935, + "step": 105 + }, + { + "epoch": 0.04396971896712641, + "grad_norm": 2.640625, + "learning_rate": 0.0001979989994997499, + "loss": 4.8805, + "step": 106 + }, + { + "epoch": 0.044384527636627605, + "grad_norm": 2.828125, + "learning_rate": 0.0001979789894947474, + "loss": 4.7667, + "step": 107 + }, + { + "epoch": 0.0447993363061288, + "grad_norm": 2.515625, + "learning_rate": 0.00019795897948974488, + "loss": 4.5853, + "step": 108 + }, + { + "epoch": 0.04521414497562999, + "grad_norm": 2.640625, + "learning_rate": 0.00019793896948474236, + "loss": 4.8158, + "step": 109 + }, + { + "epoch": 0.045628953645131184, + "grad_norm": 3.046875, + "learning_rate": 0.00019791895947973988, + "loss": 4.7036, + "step": 110 + }, + { + "epoch": 0.04604376231463238, + "grad_norm": 2.5, + "learning_rate": 0.00019789894947473737, + "loss": 4.8263, + "step": 111 + }, + { + "epoch": 0.04645857098413357, + "grad_norm": 2.890625, + "learning_rate": 0.00019787893946973488, + "loss": 4.6605, + "step": 112 + }, + { + "epoch": 0.046873379653634764, + "grad_norm": 2.625, + "learning_rate": 0.00019785892946473237, + "loss": 4.941, + "step": 113 + }, + { + "epoch": 0.04728818832313595, + "grad_norm": 2.296875, + "learning_rate": 0.00019783891945972988, + "loss": 4.8322, + "step": 114 + }, + { + "epoch": 0.047702996992637144, + "grad_norm": 2.65625, + "learning_rate": 0.00019781890945472737, + "loss": 4.7647, + "step": 115 + }, + { + "epoch": 0.04811780566213834, + "grad_norm": 2.640625, + "learning_rate": 0.00019779889944972488, + "loss": 4.9316, + "step": 116 + }, + { + "epoch": 0.04853261433163953, + "grad_norm": 3.078125, + "learning_rate": 0.00019777888944472237, + "loss": 5.0164, + "step": 117 + }, + { + "epoch": 0.04894742300114072, + "grad_norm": 2.84375, + "learning_rate": 0.00019775887943971986, + "loss": 5.037, + "step": 118 + }, + { + "epoch": 0.049362231670641916, + "grad_norm": 2.578125, + "learning_rate": 0.00019773886943471737, + "loss": 4.8183, + "step": 119 + }, + { + "epoch": 0.04977704034014311, + "grad_norm": 2.6875, + "learning_rate": 0.00019771885942971486, + "loss": 4.6524, + "step": 120 + }, + { + "epoch": 0.0501918490096443, + "grad_norm": 2.578125, + "learning_rate": 0.00019769884942471237, + "loss": 4.8141, + "step": 121 + }, + { + "epoch": 0.050606657679145496, + "grad_norm": 2.515625, + "learning_rate": 0.00019767883941970986, + "loss": 4.719, + "step": 122 + }, + { + "epoch": 0.05102146634864669, + "grad_norm": 3.0, + "learning_rate": 0.00019765882941470738, + "loss": 4.6229, + "step": 123 + }, + { + "epoch": 0.05143627501814788, + "grad_norm": 3.03125, + "learning_rate": 0.00019763881940970486, + "loss": 4.7896, + "step": 124 + }, + { + "epoch": 0.05185108368764907, + "grad_norm": 2.78125, + "learning_rate": 0.00019761880940470238, + "loss": 4.6164, + 
"step": 125 + }, + { + "epoch": 0.05226589235715026, + "grad_norm": 2.46875, + "learning_rate": 0.00019759879939969987, + "loss": 4.777, + "step": 126 + }, + { + "epoch": 0.052680701026651455, + "grad_norm": 2.40625, + "learning_rate": 0.00019757878939469735, + "loss": 4.7161, + "step": 127 + }, + { + "epoch": 0.05309550969615265, + "grad_norm": 2.484375, + "learning_rate": 0.00019755877938969484, + "loss": 4.5956, + "step": 128 + }, + { + "epoch": 0.05351031836565384, + "grad_norm": 2.8125, + "learning_rate": 0.00019753876938469235, + "loss": 4.8752, + "step": 129 + }, + { + "epoch": 0.053925127035155035, + "grad_norm": 2.09375, + "learning_rate": 0.00019751875937968984, + "loss": 4.8244, + "step": 130 + }, + { + "epoch": 0.05433993570465623, + "grad_norm": 2.6875, + "learning_rate": 0.00019749874937468736, + "loss": 4.9454, + "step": 131 + }, + { + "epoch": 0.05475474437415742, + "grad_norm": 2.390625, + "learning_rate": 0.00019747873936968487, + "loss": 4.8617, + "step": 132 + }, + { + "epoch": 0.055169553043658615, + "grad_norm": 2.65625, + "learning_rate": 0.00019745872936468236, + "loss": 4.9938, + "step": 133 + }, + { + "epoch": 0.05558436171315981, + "grad_norm": 2.390625, + "learning_rate": 0.00019743871935967985, + "loss": 4.6219, + "step": 134 + }, + { + "epoch": 0.055999170382661, + "grad_norm": 3.296875, + "learning_rate": 0.00019741870935467733, + "loss": 4.699, + "step": 135 + }, + { + "epoch": 0.05641397905216219, + "grad_norm": 2.78125, + "learning_rate": 0.00019739869934967485, + "loss": 4.8001, + "step": 136 + }, + { + "epoch": 0.05682878772166338, + "grad_norm": 2.40625, + "learning_rate": 0.00019737868934467233, + "loss": 4.617, + "step": 137 + }, + { + "epoch": 0.057243596391164574, + "grad_norm": 2.59375, + "learning_rate": 0.00019735867933966985, + "loss": 4.6295, + "step": 138 + }, + { + "epoch": 0.05765840506066577, + "grad_norm": 2.828125, + "learning_rate": 0.00019733866933466734, + "loss": 4.716, + "step": 139 + }, + { + "epoch": 0.05807321373016696, + "grad_norm": 2.265625, + "learning_rate": 0.00019731865932966485, + "loss": 4.7463, + "step": 140 + }, + { + "epoch": 0.05848802239966815, + "grad_norm": 2.484375, + "learning_rate": 0.00019729864932466234, + "loss": 4.6761, + "step": 141 + }, + { + "epoch": 0.05890283106916935, + "grad_norm": 2.703125, + "learning_rate": 0.00019727863931965985, + "loss": 4.7429, + "step": 142 + }, + { + "epoch": 0.05931763973867054, + "grad_norm": 2.671875, + "learning_rate": 0.00019725862931465734, + "loss": 4.9624, + "step": 143 + }, + { + "epoch": 0.05973244840817173, + "grad_norm": 3.171875, + "learning_rate": 0.00019723861930965483, + "loss": 5.0382, + "step": 144 + }, + { + "epoch": 0.060147257077672926, + "grad_norm": 2.71875, + "learning_rate": 0.00019721860930465234, + "loss": 4.7497, + "step": 145 + }, + { + "epoch": 0.06056206574717411, + "grad_norm": 2.3125, + "learning_rate": 0.00019719859929964983, + "loss": 5.0408, + "step": 146 + }, + { + "epoch": 0.060976874416675306, + "grad_norm": 2.375, + "learning_rate": 0.00019717858929464734, + "loss": 4.8933, + "step": 147 + }, + { + "epoch": 0.0613916830861765, + "grad_norm": 2.328125, + "learning_rate": 0.00019715857928964483, + "loss": 4.7968, + "step": 148 + }, + { + "epoch": 0.06180649175567769, + "grad_norm": 2.46875, + "learning_rate": 0.00019713856928464234, + "loss": 4.8065, + "step": 149 + }, + { + "epoch": 0.062221300425178885, + "grad_norm": 2.765625, + "learning_rate": 0.00019711855927963983, + "loss": 4.8984, + "step": 150 + }, + { + "epoch": 
0.06263610909468008, + "grad_norm": 2.3125, + "learning_rate": 0.00019709854927463735, + "loss": 4.8585, + "step": 151 + }, + { + "epoch": 0.06305091776418127, + "grad_norm": 2.546875, + "learning_rate": 0.00019707853926963483, + "loss": 4.6844, + "step": 152 + }, + { + "epoch": 0.06346572643368247, + "grad_norm": 2.890625, + "learning_rate": 0.00019705852926463232, + "loss": 4.7681, + "step": 153 + }, + { + "epoch": 0.06388053510318366, + "grad_norm": 2.953125, + "learning_rate": 0.0001970385192596298, + "loss": 4.9683, + "step": 154 + }, + { + "epoch": 0.06429534377268485, + "grad_norm": 2.609375, + "learning_rate": 0.00019701850925462732, + "loss": 4.6499, + "step": 155 + }, + { + "epoch": 0.06471015244218604, + "grad_norm": 3.046875, + "learning_rate": 0.0001969984992496248, + "loss": 4.8674, + "step": 156 + }, + { + "epoch": 0.06512496111168724, + "grad_norm": 2.53125, + "learning_rate": 0.00019697848924462232, + "loss": 4.8091, + "step": 157 + }, + { + "epoch": 0.06553976978118843, + "grad_norm": 2.421875, + "learning_rate": 0.0001969584792396198, + "loss": 4.7716, + "step": 158 + }, + { + "epoch": 0.06595457845068962, + "grad_norm": 3.046875, + "learning_rate": 0.00019693846923461733, + "loss": 4.9851, + "step": 159 + }, + { + "epoch": 0.06636938712019082, + "grad_norm": 2.5625, + "learning_rate": 0.00019691845922961484, + "loss": 4.7429, + "step": 160 + }, + { + "epoch": 0.06678419578969201, + "grad_norm": 2.703125, + "learning_rate": 0.00019689844922461233, + "loss": 4.7494, + "step": 161 + }, + { + "epoch": 0.0671990044591932, + "grad_norm": 2.234375, + "learning_rate": 0.00019687843921960981, + "loss": 4.6275, + "step": 162 + }, + { + "epoch": 0.06761381312869438, + "grad_norm": 2.140625, + "learning_rate": 0.0001968584292146073, + "loss": 4.6852, + "step": 163 + }, + { + "epoch": 0.06802862179819558, + "grad_norm": 2.40625, + "learning_rate": 0.00019683841920960482, + "loss": 4.9879, + "step": 164 + }, + { + "epoch": 0.06844343046769677, + "grad_norm": 2.15625, + "learning_rate": 0.0001968184092046023, + "loss": 4.7699, + "step": 165 + }, + { + "epoch": 0.06885823913719796, + "grad_norm": 2.546875, + "learning_rate": 0.00019679839919959982, + "loss": 4.719, + "step": 166 + }, + { + "epoch": 0.06927304780669916, + "grad_norm": 3.0, + "learning_rate": 0.0001967783891945973, + "loss": 4.7444, + "step": 167 + }, + { + "epoch": 0.06968785647620035, + "grad_norm": 2.921875, + "learning_rate": 0.00019675837918959482, + "loss": 4.5795, + "step": 168 + }, + { + "epoch": 0.07010266514570154, + "grad_norm": 2.578125, + "learning_rate": 0.0001967383691845923, + "loss": 4.5533, + "step": 169 + }, + { + "epoch": 0.07051747381520274, + "grad_norm": 2.8125, + "learning_rate": 0.0001967183591795898, + "loss": 4.6815, + "step": 170 + }, + { + "epoch": 0.07093228248470393, + "grad_norm": 2.921875, + "learning_rate": 0.00019669834917458728, + "loss": 4.5554, + "step": 171 + }, + { + "epoch": 0.07134709115420512, + "grad_norm": 2.921875, + "learning_rate": 0.0001966783391695848, + "loss": 4.5954, + "step": 172 + }, + { + "epoch": 0.07176189982370632, + "grad_norm": 2.4375, + "learning_rate": 0.0001966583291645823, + "loss": 4.4794, + "step": 173 + }, + { + "epoch": 0.07217670849320751, + "grad_norm": 2.5625, + "learning_rate": 0.0001966383191595798, + "loss": 4.7955, + "step": 174 + }, + { + "epoch": 0.0725915171627087, + "grad_norm": 3.71875, + "learning_rate": 0.0001966183091545773, + "loss": 4.7153, + "step": 175 + }, + { + "epoch": 0.0730063258322099, + "grad_norm": 2.546875, + "learning_rate": 
0.0001965982991495748, + "loss": 4.9269, + "step": 176 + }, + { + "epoch": 0.07342113450171109, + "grad_norm": 2.6875, + "learning_rate": 0.0001965782891445723, + "loss": 4.5882, + "step": 177 + }, + { + "epoch": 0.07383594317121228, + "grad_norm": 2.375, + "learning_rate": 0.0001965582791395698, + "loss": 4.6883, + "step": 178 + }, + { + "epoch": 0.07425075184071347, + "grad_norm": 2.75, + "learning_rate": 0.0001965382691345673, + "loss": 4.6313, + "step": 179 + }, + { + "epoch": 0.07466556051021467, + "grad_norm": 2.421875, + "learning_rate": 0.00019651825912956477, + "loss": 4.676, + "step": 180 + }, + { + "epoch": 0.07508036917971586, + "grad_norm": 2.546875, + "learning_rate": 0.0001964982491245623, + "loss": 4.5967, + "step": 181 + }, + { + "epoch": 0.07549517784921705, + "grad_norm": 2.53125, + "learning_rate": 0.00019647823911955978, + "loss": 4.863, + "step": 182 + }, + { + "epoch": 0.07590998651871825, + "grad_norm": 2.765625, + "learning_rate": 0.0001964582291145573, + "loss": 4.4148, + "step": 183 + }, + { + "epoch": 0.07632479518821943, + "grad_norm": 2.453125, + "learning_rate": 0.00019643821910955478, + "loss": 4.8396, + "step": 184 + }, + { + "epoch": 0.07673960385772062, + "grad_norm": 2.625, + "learning_rate": 0.0001964182091045523, + "loss": 4.741, + "step": 185 + }, + { + "epoch": 0.07715441252722181, + "grad_norm": 2.890625, + "learning_rate": 0.00019639819909954978, + "loss": 4.6253, + "step": 186 + }, + { + "epoch": 0.077569221196723, + "grad_norm": 2.90625, + "learning_rate": 0.0001963781890945473, + "loss": 4.6309, + "step": 187 + }, + { + "epoch": 0.0779840298662242, + "grad_norm": 2.640625, + "learning_rate": 0.00019635817908954478, + "loss": 4.8369, + "step": 188 + }, + { + "epoch": 0.0783988385357254, + "grad_norm": 2.59375, + "learning_rate": 0.00019633816908454227, + "loss": 4.8035, + "step": 189 + }, + { + "epoch": 0.07881364720522659, + "grad_norm": 2.75, + "learning_rate": 0.00019631815907953978, + "loss": 4.8848, + "step": 190 + }, + { + "epoch": 0.07922845587472778, + "grad_norm": 2.421875, + "learning_rate": 0.00019629814907453727, + "loss": 4.8717, + "step": 191 + }, + { + "epoch": 0.07964326454422897, + "grad_norm": 2.84375, + "learning_rate": 0.00019627813906953478, + "loss": 5.0118, + "step": 192 + }, + { + "epoch": 0.08005807321373017, + "grad_norm": 2.6875, + "learning_rate": 0.00019625812906453227, + "loss": 4.7332, + "step": 193 + }, + { + "epoch": 0.08047288188323136, + "grad_norm": 2.8125, + "learning_rate": 0.0001962381190595298, + "loss": 4.6317, + "step": 194 + }, + { + "epoch": 0.08088769055273255, + "grad_norm": 2.109375, + "learning_rate": 0.00019621810905452727, + "loss": 4.6825, + "step": 195 + }, + { + "epoch": 0.08130249922223375, + "grad_norm": 2.703125, + "learning_rate": 0.0001961980990495248, + "loss": 4.5926, + "step": 196 + }, + { + "epoch": 0.08171730789173494, + "grad_norm": 2.546875, + "learning_rate": 0.00019617808904452225, + "loss": 4.6585, + "step": 197 + }, + { + "epoch": 0.08213211656123613, + "grad_norm": 2.609375, + "learning_rate": 0.00019615807903951976, + "loss": 4.7669, + "step": 198 + }, + { + "epoch": 0.08254692523073733, + "grad_norm": 2.703125, + "learning_rate": 0.00019613806903451725, + "loss": 4.7337, + "step": 199 + }, + { + "epoch": 0.08296173390023852, + "grad_norm": 2.375, + "learning_rate": 0.00019611805902951476, + "loss": 4.9821, + "step": 200 + }, + { + "epoch": 0.08337654256973971, + "grad_norm": 2.75, + "learning_rate": 0.00019609804902451228, + "loss": 4.6123, + "step": 201 + }, + { + "epoch": 
0.0837913512392409, + "grad_norm": 3.140625, + "learning_rate": 0.00019607803901950977, + "loss": 4.9772, + "step": 202 + }, + { + "epoch": 0.0842061599087421, + "grad_norm": 3.546875, + "learning_rate": 0.00019605802901450728, + "loss": 4.5804, + "step": 203 + }, + { + "epoch": 0.08462096857824329, + "grad_norm": 2.875, + "learning_rate": 0.00019603801900950477, + "loss": 4.7674, + "step": 204 + }, + { + "epoch": 0.08503577724774448, + "grad_norm": 2.515625, + "learning_rate": 0.00019601800900450226, + "loss": 4.8421, + "step": 205 + }, + { + "epoch": 0.08545058591724566, + "grad_norm": 2.484375, + "learning_rate": 0.00019599799899949974, + "loss": 4.6861, + "step": 206 + }, + { + "epoch": 0.08586539458674686, + "grad_norm": 2.578125, + "learning_rate": 0.00019597798899449726, + "loss": 4.5915, + "step": 207 + }, + { + "epoch": 0.08628020325624805, + "grad_norm": 2.9375, + "learning_rate": 0.00019595797898949474, + "loss": 4.7162, + "step": 208 + }, + { + "epoch": 0.08669501192574924, + "grad_norm": 2.46875, + "learning_rate": 0.00019593796898449226, + "loss": 4.8773, + "step": 209 + }, + { + "epoch": 0.08710982059525044, + "grad_norm": 2.40625, + "learning_rate": 0.00019591795897948975, + "loss": 4.6635, + "step": 210 + }, + { + "epoch": 0.08752462926475163, + "grad_norm": 2.75, + "learning_rate": 0.00019589794897448726, + "loss": 4.8939, + "step": 211 + }, + { + "epoch": 0.08793943793425282, + "grad_norm": 2.53125, + "learning_rate": 0.00019587793896948475, + "loss": 4.8248, + "step": 212 + }, + { + "epoch": 0.08835424660375402, + "grad_norm": 2.609375, + "learning_rate": 0.00019585792896448226, + "loss": 4.5695, + "step": 213 + }, + { + "epoch": 0.08876905527325521, + "grad_norm": 2.515625, + "learning_rate": 0.00019583791895947975, + "loss": 4.7652, + "step": 214 + }, + { + "epoch": 0.0891838639427564, + "grad_norm": 2.359375, + "learning_rate": 0.00019581790895447724, + "loss": 4.6485, + "step": 215 + }, + { + "epoch": 0.0895986726122576, + "grad_norm": 2.171875, + "learning_rate": 0.00019579789894947475, + "loss": 4.6893, + "step": 216 + }, + { + "epoch": 0.09001348128175879, + "grad_norm": 2.515625, + "learning_rate": 0.00019577788894447224, + "loss": 4.7114, + "step": 217 + }, + { + "epoch": 0.09042828995125998, + "grad_norm": 2.296875, + "learning_rate": 0.00019575787893946975, + "loss": 4.6796, + "step": 218 + }, + { + "epoch": 0.09084309862076118, + "grad_norm": 2.359375, + "learning_rate": 0.00019573786893446724, + "loss": 4.7153, + "step": 219 + }, + { + "epoch": 0.09125790729026237, + "grad_norm": 2.6875, + "learning_rate": 0.00019571785892946475, + "loss": 4.5696, + "step": 220 + }, + { + "epoch": 0.09167271595976356, + "grad_norm": 2.453125, + "learning_rate": 0.00019569784892446224, + "loss": 5.0882, + "step": 221 + }, + { + "epoch": 0.09208752462926476, + "grad_norm": 2.390625, + "learning_rate": 0.00019567783891945976, + "loss": 4.7033, + "step": 222 + }, + { + "epoch": 0.09250233329876595, + "grad_norm": 2.609375, + "learning_rate": 0.00019565782891445724, + "loss": 4.813, + "step": 223 + }, + { + "epoch": 0.09291714196826714, + "grad_norm": 2.234375, + "learning_rate": 0.00019563781890945473, + "loss": 4.4582, + "step": 224 + }, + { + "epoch": 0.09333195063776833, + "grad_norm": 2.234375, + "learning_rate": 0.00019561780890445222, + "loss": 4.7579, + "step": 225 + }, + { + "epoch": 0.09374675930726953, + "grad_norm": 2.40625, + "learning_rate": 0.00019559779889944973, + "loss": 4.6935, + "step": 226 + }, + { + "epoch": 0.09416156797677072, + "grad_norm": 2.46875, + 
"learning_rate": 0.00019557778889444722, + "loss": 4.7105, + "step": 227 + }, + { + "epoch": 0.0945763766462719, + "grad_norm": 2.546875, + "learning_rate": 0.00019555777888944473, + "loss": 4.5942, + "step": 228 + }, + { + "epoch": 0.0949911853157731, + "grad_norm": 2.5, + "learning_rate": 0.00019553776888444225, + "loss": 4.6961, + "step": 229 + }, + { + "epoch": 0.09540599398527429, + "grad_norm": 2.53125, + "learning_rate": 0.00019551775887943974, + "loss": 4.9044, + "step": 230 + }, + { + "epoch": 0.09582080265477548, + "grad_norm": 2.3125, + "learning_rate": 0.00019549774887443725, + "loss": 4.7508, + "step": 231 + }, + { + "epoch": 0.09623561132427667, + "grad_norm": 2.203125, + "learning_rate": 0.0001954777388694347, + "loss": 4.7008, + "step": 232 + }, + { + "epoch": 0.09665041999377787, + "grad_norm": 2.3125, + "learning_rate": 0.00019545772886443222, + "loss": 4.9721, + "step": 233 + }, + { + "epoch": 0.09706522866327906, + "grad_norm": 2.53125, + "learning_rate": 0.0001954377188594297, + "loss": 4.54, + "step": 234 + }, + { + "epoch": 0.09748003733278025, + "grad_norm": 2.265625, + "learning_rate": 0.00019541770885442723, + "loss": 4.8986, + "step": 235 + }, + { + "epoch": 0.09789484600228145, + "grad_norm": 2.421875, + "learning_rate": 0.0001953976988494247, + "loss": 4.3516, + "step": 236 + }, + { + "epoch": 0.09830965467178264, + "grad_norm": 2.890625, + "learning_rate": 0.00019537768884442223, + "loss": 4.6487, + "step": 237 + }, + { + "epoch": 0.09872446334128383, + "grad_norm": 2.234375, + "learning_rate": 0.00019535767883941971, + "loss": 4.7861, + "step": 238 + }, + { + "epoch": 0.09913927201078503, + "grad_norm": 2.171875, + "learning_rate": 0.00019533766883441723, + "loss": 4.4747, + "step": 239 + }, + { + "epoch": 0.09955408068028622, + "grad_norm": 2.640625, + "learning_rate": 0.00019531765882941472, + "loss": 4.6611, + "step": 240 + }, + { + "epoch": 0.09996888934978741, + "grad_norm": 2.359375, + "learning_rate": 0.0001952976488244122, + "loss": 4.8161, + "step": 241 + }, + { + "epoch": 0.1003836980192886, + "grad_norm": 2.34375, + "learning_rate": 0.00019527763881940972, + "loss": 4.6394, + "step": 242 + }, + { + "epoch": 0.1007985066887898, + "grad_norm": 2.421875, + "learning_rate": 0.0001952576288144072, + "loss": 4.5768, + "step": 243 + }, + { + "epoch": 0.10121331535829099, + "grad_norm": 2.484375, + "learning_rate": 0.00019523761880940472, + "loss": 4.5237, + "step": 244 + }, + { + "epoch": 0.10162812402779219, + "grad_norm": 2.171875, + "learning_rate": 0.0001952176088044022, + "loss": 4.5366, + "step": 245 + }, + { + "epoch": 0.10204293269729338, + "grad_norm": 2.859375, + "learning_rate": 0.00019519759879939972, + "loss": 4.647, + "step": 246 + }, + { + "epoch": 0.10245774136679457, + "grad_norm": 2.078125, + "learning_rate": 0.0001951775887943972, + "loss": 4.5444, + "step": 247 + }, + { + "epoch": 0.10287255003629577, + "grad_norm": 2.125, + "learning_rate": 0.00019515757878939472, + "loss": 4.5602, + "step": 248 + }, + { + "epoch": 0.10328735870579694, + "grad_norm": 2.25, + "learning_rate": 0.0001951375687843922, + "loss": 4.5301, + "step": 249 + }, + { + "epoch": 0.10370216737529814, + "grad_norm": 3.046875, + "learning_rate": 0.0001951175587793897, + "loss": 4.691, + "step": 250 + }, + { + "epoch": 0.10411697604479933, + "grad_norm": 2.375, + "learning_rate": 0.00019509754877438718, + "loss": 4.6174, + "step": 251 + }, + { + "epoch": 0.10453178471430052, + "grad_norm": 2.15625, + "learning_rate": 0.0001950775387693847, + "loss": 4.7134, + "step": 
252 + }, + { + "epoch": 0.10494659338380172, + "grad_norm": 2.453125, + "learning_rate": 0.0001950575287643822, + "loss": 4.8516, + "step": 253 + }, + { + "epoch": 0.10536140205330291, + "grad_norm": 2.828125, + "learning_rate": 0.0001950375187593797, + "loss": 4.6675, + "step": 254 + }, + { + "epoch": 0.1057762107228041, + "grad_norm": 2.53125, + "learning_rate": 0.0001950175087543772, + "loss": 4.7508, + "step": 255 + }, + { + "epoch": 0.1061910193923053, + "grad_norm": 2.65625, + "learning_rate": 0.0001949974987493747, + "loss": 4.6769, + "step": 256 + }, + { + "epoch": 0.10660582806180649, + "grad_norm": 2.3125, + "learning_rate": 0.00019497748874437222, + "loss": 4.3861, + "step": 257 + }, + { + "epoch": 0.10702063673130768, + "grad_norm": 2.34375, + "learning_rate": 0.0001949574787393697, + "loss": 4.6051, + "step": 258 + }, + { + "epoch": 0.10743544540080888, + "grad_norm": 2.265625, + "learning_rate": 0.0001949374687343672, + "loss": 4.5368, + "step": 259 + }, + { + "epoch": 0.10785025407031007, + "grad_norm": 2.546875, + "learning_rate": 0.00019491745872936468, + "loss": 4.7008, + "step": 260 + }, + { + "epoch": 0.10826506273981126, + "grad_norm": 2.21875, + "learning_rate": 0.0001948974487243622, + "loss": 4.6742, + "step": 261 + }, + { + "epoch": 0.10867987140931246, + "grad_norm": 2.453125, + "learning_rate": 0.00019487743871935968, + "loss": 4.3499, + "step": 262 + }, + { + "epoch": 0.10909468007881365, + "grad_norm": 2.390625, + "learning_rate": 0.0001948574287143572, + "loss": 4.6754, + "step": 263 + }, + { + "epoch": 0.10950948874831484, + "grad_norm": 2.296875, + "learning_rate": 0.00019483741870935468, + "loss": 4.6755, + "step": 264 + }, + { + "epoch": 0.10992429741781604, + "grad_norm": 2.46875, + "learning_rate": 0.0001948174087043522, + "loss": 4.6689, + "step": 265 + }, + { + "epoch": 0.11033910608731723, + "grad_norm": 2.109375, + "learning_rate": 0.00019479739869934968, + "loss": 4.6618, + "step": 266 + }, + { + "epoch": 0.11075391475681842, + "grad_norm": 2.53125, + "learning_rate": 0.0001947773886943472, + "loss": 4.7117, + "step": 267 + }, + { + "epoch": 0.11116872342631962, + "grad_norm": 2.21875, + "learning_rate": 0.00019475737868934466, + "loss": 4.6274, + "step": 268 + }, + { + "epoch": 0.11158353209582081, + "grad_norm": 2.46875, + "learning_rate": 0.00019473736868434217, + "loss": 4.6007, + "step": 269 + }, + { + "epoch": 0.111998340765322, + "grad_norm": 2.53125, + "learning_rate": 0.0001947173586793397, + "loss": 4.8718, + "step": 270 + }, + { + "epoch": 0.11241314943482318, + "grad_norm": 2.734375, + "learning_rate": 0.00019469734867433717, + "loss": 4.7442, + "step": 271 + }, + { + "epoch": 0.11282795810432437, + "grad_norm": 2.390625, + "learning_rate": 0.0001946773386693347, + "loss": 4.6158, + "step": 272 + }, + { + "epoch": 0.11324276677382557, + "grad_norm": 2.453125, + "learning_rate": 0.00019465732866433218, + "loss": 4.7251, + "step": 273 + }, + { + "epoch": 0.11365757544332676, + "grad_norm": 2.453125, + "learning_rate": 0.0001946373186593297, + "loss": 4.9831, + "step": 274 + }, + { + "epoch": 0.11407238411282795, + "grad_norm": 2.421875, + "learning_rate": 0.00019461730865432718, + "loss": 4.6253, + "step": 275 + }, + { + "epoch": 0.11448719278232915, + "grad_norm": 2.125, + "learning_rate": 0.00019459729864932467, + "loss": 4.5307, + "step": 276 + }, + { + "epoch": 0.11490200145183034, + "grad_norm": 2.546875, + "learning_rate": 0.00019457728864432215, + "loss": 4.7805, + "step": 277 + }, + { + "epoch": 0.11531681012133153, + "grad_norm": 
2.265625, + "learning_rate": 0.00019455727863931967, + "loss": 4.5133, + "step": 278 + }, + { + "epoch": 0.11573161879083273, + "grad_norm": 2.5, + "learning_rate": 0.00019453726863431715, + "loss": 4.4652, + "step": 279 + }, + { + "epoch": 0.11614642746033392, + "grad_norm": 2.5625, + "learning_rate": 0.00019451725862931467, + "loss": 4.7797, + "step": 280 + }, + { + "epoch": 0.11656123612983511, + "grad_norm": 2.546875, + "learning_rate": 0.00019449724862431216, + "loss": 4.6143, + "step": 281 + }, + { + "epoch": 0.1169760447993363, + "grad_norm": 2.53125, + "learning_rate": 0.00019447723861930967, + "loss": 4.7538, + "step": 282 + }, + { + "epoch": 0.1173908534688375, + "grad_norm": 2.53125, + "learning_rate": 0.00019445722861430716, + "loss": 4.6332, + "step": 283 + }, + { + "epoch": 0.1178056621383387, + "grad_norm": 2.484375, + "learning_rate": 0.00019443721860930467, + "loss": 4.5163, + "step": 284 + }, + { + "epoch": 0.11822047080783989, + "grad_norm": 2.34375, + "learning_rate": 0.00019441720860430216, + "loss": 4.4752, + "step": 285 + }, + { + "epoch": 0.11863527947734108, + "grad_norm": 2.3125, + "learning_rate": 0.00019439719859929965, + "loss": 4.5883, + "step": 286 + }, + { + "epoch": 0.11905008814684227, + "grad_norm": 2.40625, + "learning_rate": 0.00019437718859429716, + "loss": 4.723, + "step": 287 + }, + { + "epoch": 0.11946489681634347, + "grad_norm": 2.8125, + "learning_rate": 0.00019435717858929465, + "loss": 4.8239, + "step": 288 + }, + { + "epoch": 0.11987970548584466, + "grad_norm": 2.765625, + "learning_rate": 0.00019433716858429216, + "loss": 4.7219, + "step": 289 + }, + { + "epoch": 0.12029451415534585, + "grad_norm": 2.515625, + "learning_rate": 0.00019431715857928965, + "loss": 4.9378, + "step": 290 + }, + { + "epoch": 0.12070932282484705, + "grad_norm": 2.359375, + "learning_rate": 0.00019429714857428716, + "loss": 4.4392, + "step": 291 + }, + { + "epoch": 0.12112413149434823, + "grad_norm": 2.390625, + "learning_rate": 0.00019427713856928465, + "loss": 5.0761, + "step": 292 + }, + { + "epoch": 0.12153894016384942, + "grad_norm": 2.484375, + "learning_rate": 0.00019425712856428217, + "loss": 4.5624, + "step": 293 + }, + { + "epoch": 0.12195374883335061, + "grad_norm": 2.453125, + "learning_rate": 0.00019423711855927965, + "loss": 4.889, + "step": 294 + }, + { + "epoch": 0.1223685575028518, + "grad_norm": 2.28125, + "learning_rate": 0.00019421710855427714, + "loss": 4.7606, + "step": 295 + }, + { + "epoch": 0.122783366172353, + "grad_norm": 2.359375, + "learning_rate": 0.00019419709854927463, + "loss": 4.4913, + "step": 296 + }, + { + "epoch": 0.12319817484185419, + "grad_norm": 2.40625, + "learning_rate": 0.00019417708854427214, + "loss": 4.6842, + "step": 297 + }, + { + "epoch": 0.12361298351135538, + "grad_norm": 2.4375, + "learning_rate": 0.00019415707853926966, + "loss": 4.5627, + "step": 298 + }, + { + "epoch": 0.12402779218085658, + "grad_norm": 2.28125, + "learning_rate": 0.00019413706853426714, + "loss": 4.68, + "step": 299 + }, + { + "epoch": 0.12444260085035777, + "grad_norm": 2.703125, + "learning_rate": 0.00019411705852926466, + "loss": 4.5748, + "step": 300 + }, + { + "epoch": 0.12485740951985896, + "grad_norm": 2.953125, + "learning_rate": 0.00019409704852426215, + "loss": 4.5055, + "step": 301 + }, + { + "epoch": 0.12527221818936016, + "grad_norm": 2.515625, + "learning_rate": 0.00019407703851925966, + "loss": 4.6107, + "step": 302 + }, + { + "epoch": 0.12568702685886135, + "grad_norm": 2.984375, + "learning_rate": 0.00019405702851425712, + 
"loss": 4.531, + "step": 303 + }, + { + "epoch": 0.12610183552836254, + "grad_norm": 2.28125, + "learning_rate": 0.00019403701850925463, + "loss": 4.6775, + "step": 304 + }, + { + "epoch": 0.12651664419786374, + "grad_norm": 2.859375, + "learning_rate": 0.00019401700850425212, + "loss": 4.6577, + "step": 305 + }, + { + "epoch": 0.12693145286736493, + "grad_norm": 2.484375, + "learning_rate": 0.00019399699849924964, + "loss": 4.5784, + "step": 306 + }, + { + "epoch": 0.12734626153686612, + "grad_norm": 3.28125, + "learning_rate": 0.00019397698849424712, + "loss": 4.6397, + "step": 307 + }, + { + "epoch": 0.12776107020636732, + "grad_norm": 2.359375, + "learning_rate": 0.00019395697848924464, + "loss": 4.7788, + "step": 308 + }, + { + "epoch": 0.1281758788758685, + "grad_norm": 2.546875, + "learning_rate": 0.00019393696848424212, + "loss": 5.0678, + "step": 309 + }, + { + "epoch": 0.1285906875453697, + "grad_norm": 2.328125, + "learning_rate": 0.00019391695847923964, + "loss": 4.4743, + "step": 310 + }, + { + "epoch": 0.1290054962148709, + "grad_norm": 2.84375, + "learning_rate": 0.00019389694847423713, + "loss": 4.6317, + "step": 311 + }, + { + "epoch": 0.1294203048843721, + "grad_norm": 2.3125, + "learning_rate": 0.00019387693846923461, + "loss": 4.6397, + "step": 312 + }, + { + "epoch": 0.12983511355387328, + "grad_norm": 2.40625, + "learning_rate": 0.00019385692846423213, + "loss": 4.5141, + "step": 313 + }, + { + "epoch": 0.13024992222337448, + "grad_norm": 2.390625, + "learning_rate": 0.00019383691845922962, + "loss": 4.6465, + "step": 314 + }, + { + "epoch": 0.13066473089287567, + "grad_norm": 2.390625, + "learning_rate": 0.00019381690845422713, + "loss": 4.8338, + "step": 315 + }, + { + "epoch": 0.13107953956237686, + "grad_norm": 2.421875, + "learning_rate": 0.00019379689844922462, + "loss": 4.6736, + "step": 316 + }, + { + "epoch": 0.13149434823187806, + "grad_norm": 2.859375, + "learning_rate": 0.00019377688844422213, + "loss": 4.7102, + "step": 317 + }, + { + "epoch": 0.13190915690137925, + "grad_norm": 2.46875, + "learning_rate": 0.00019375687843921962, + "loss": 4.6496, + "step": 318 + }, + { + "epoch": 0.13232396557088044, + "grad_norm": 2.140625, + "learning_rate": 0.00019373686843421713, + "loss": 4.8149, + "step": 319 + }, + { + "epoch": 0.13273877424038164, + "grad_norm": 2.21875, + "learning_rate": 0.00019371685842921462, + "loss": 4.5535, + "step": 320 + }, + { + "epoch": 0.13315358290988283, + "grad_norm": 2.78125, + "learning_rate": 0.0001936968484242121, + "loss": 4.5018, + "step": 321 + }, + { + "epoch": 0.13356839157938402, + "grad_norm": 2.484375, + "learning_rate": 0.0001936768384192096, + "loss": 4.6553, + "step": 322 + }, + { + "epoch": 0.13398320024888521, + "grad_norm": 2.390625, + "learning_rate": 0.0001936568284142071, + "loss": 4.8589, + "step": 323 + }, + { + "epoch": 0.1343980089183864, + "grad_norm": 2.515625, + "learning_rate": 0.0001936368184092046, + "loss": 4.8949, + "step": 324 + }, + { + "epoch": 0.13481281758788757, + "grad_norm": 2.3125, + "learning_rate": 0.0001936168084042021, + "loss": 4.5917, + "step": 325 + }, + { + "epoch": 0.13522762625738877, + "grad_norm": 2.765625, + "learning_rate": 0.00019359679839919963, + "loss": 4.5207, + "step": 326 + }, + { + "epoch": 0.13564243492688996, + "grad_norm": 3.015625, + "learning_rate": 0.0001935767883941971, + "loss": 4.6245, + "step": 327 + }, + { + "epoch": 0.13605724359639115, + "grad_norm": 2.265625, + "learning_rate": 0.00019355677838919463, + "loss": 4.5721, + "step": 328 + }, + { + "epoch": 
0.13647205226589235, + "grad_norm": 2.171875, + "learning_rate": 0.00019353676838419211, + "loss": 4.5226, + "step": 329 + }, + { + "epoch": 0.13688686093539354, + "grad_norm": 2.203125, + "learning_rate": 0.0001935167583791896, + "loss": 4.4595, + "step": 330 + }, + { + "epoch": 0.13730166960489473, + "grad_norm": 2.328125, + "learning_rate": 0.0001934967483741871, + "loss": 4.941, + "step": 331 + }, + { + "epoch": 0.13771647827439593, + "grad_norm": 2.296875, + "learning_rate": 0.0001934767383691846, + "loss": 4.523, + "step": 332 + }, + { + "epoch": 0.13813128694389712, + "grad_norm": 2.234375, + "learning_rate": 0.0001934567283641821, + "loss": 4.5625, + "step": 333 + }, + { + "epoch": 0.1385460956133983, + "grad_norm": 3.140625, + "learning_rate": 0.0001934367183591796, + "loss": 4.7129, + "step": 334 + }, + { + "epoch": 0.1389609042828995, + "grad_norm": 2.609375, + "learning_rate": 0.0001934167083541771, + "loss": 4.5963, + "step": 335 + }, + { + "epoch": 0.1393757129524007, + "grad_norm": 2.5, + "learning_rate": 0.0001933966983491746, + "loss": 4.4981, + "step": 336 + }, + { + "epoch": 0.1397905216219019, + "grad_norm": 2.453125, + "learning_rate": 0.0001933766883441721, + "loss": 4.4765, + "step": 337 + }, + { + "epoch": 0.14020533029140309, + "grad_norm": 2.28125, + "learning_rate": 0.00019335667833916958, + "loss": 4.7399, + "step": 338 + }, + { + "epoch": 0.14062013896090428, + "grad_norm": 2.59375, + "learning_rate": 0.0001933366683341671, + "loss": 4.5903, + "step": 339 + }, + { + "epoch": 0.14103494763040547, + "grad_norm": 3.40625, + "learning_rate": 0.00019331665832916458, + "loss": 4.7435, + "step": 340 + }, + { + "epoch": 0.14144975629990666, + "grad_norm": 2.734375, + "learning_rate": 0.0001932966483241621, + "loss": 4.7956, + "step": 341 + }, + { + "epoch": 0.14186456496940786, + "grad_norm": 2.390625, + "learning_rate": 0.00019327663831915958, + "loss": 4.7103, + "step": 342 + }, + { + "epoch": 0.14227937363890905, + "grad_norm": 2.4375, + "learning_rate": 0.0001932566283141571, + "loss": 4.6113, + "step": 343 + }, + { + "epoch": 0.14269418230841024, + "grad_norm": 2.46875, + "learning_rate": 0.00019323661830915459, + "loss": 4.7041, + "step": 344 + }, + { + "epoch": 0.14310899097791144, + "grad_norm": 2.390625, + "learning_rate": 0.0001932166083041521, + "loss": 4.6125, + "step": 345 + }, + { + "epoch": 0.14352379964741263, + "grad_norm": 2.3125, + "learning_rate": 0.0001931965982991496, + "loss": 4.4779, + "step": 346 + }, + { + "epoch": 0.14393860831691382, + "grad_norm": 2.75, + "learning_rate": 0.00019317658829414708, + "loss": 4.4845, + "step": 347 + }, + { + "epoch": 0.14435341698641502, + "grad_norm": 2.703125, + "learning_rate": 0.00019315657828914456, + "loss": 4.5883, + "step": 348 + }, + { + "epoch": 0.1447682256559162, + "grad_norm": 2.1875, + "learning_rate": 0.00019313656828414208, + "loss": 4.6283, + "step": 349 + }, + { + "epoch": 0.1451830343254174, + "grad_norm": 2.28125, + "learning_rate": 0.00019311655827913956, + "loss": 4.6821, + "step": 350 + }, + { + "epoch": 0.1455978429949186, + "grad_norm": 2.5, + "learning_rate": 0.00019309654827413708, + "loss": 4.2464, + "step": 351 + }, + { + "epoch": 0.1460126516644198, + "grad_norm": 2.484375, + "learning_rate": 0.00019307653826913457, + "loss": 4.4201, + "step": 352 + }, + { + "epoch": 0.14642746033392098, + "grad_norm": 2.984375, + "learning_rate": 0.00019305652826413208, + "loss": 4.8415, + "step": 353 + }, + { + "epoch": 0.14684226900342218, + "grad_norm": 2.53125, + "learning_rate": 
0.0001930365182591296, + "loss": 4.612, + "step": 354 + }, + { + "epoch": 0.14725707767292337, + "grad_norm": 2.25, + "learning_rate": 0.00019301650825412708, + "loss": 4.7386, + "step": 355 + }, + { + "epoch": 0.14767188634242456, + "grad_norm": 2.171875, + "learning_rate": 0.00019299649824912457, + "loss": 4.6206, + "step": 356 + }, + { + "epoch": 0.14808669501192576, + "grad_norm": 2.28125, + "learning_rate": 0.00019297648824412206, + "loss": 4.7842, + "step": 357 + }, + { + "epoch": 0.14850150368142695, + "grad_norm": 2.3125, + "learning_rate": 0.00019295647823911957, + "loss": 4.4395, + "step": 358 + }, + { + "epoch": 0.14891631235092814, + "grad_norm": 2.09375, + "learning_rate": 0.00019293646823411706, + "loss": 4.8042, + "step": 359 + }, + { + "epoch": 0.14933112102042934, + "grad_norm": 2.09375, + "learning_rate": 0.00019291645822911457, + "loss": 4.5574, + "step": 360 + }, + { + "epoch": 0.14974592968993053, + "grad_norm": 2.234375, + "learning_rate": 0.00019289644822411206, + "loss": 4.5097, + "step": 361 + }, + { + "epoch": 0.15016073835943172, + "grad_norm": 2.4375, + "learning_rate": 0.00019287643821910957, + "loss": 4.6248, + "step": 362 + }, + { + "epoch": 0.15057554702893292, + "grad_norm": 2.34375, + "learning_rate": 0.00019285642821410706, + "loss": 4.5815, + "step": 363 + }, + { + "epoch": 0.1509903556984341, + "grad_norm": 2.3125, + "learning_rate": 0.00019283641820910458, + "loss": 4.6993, + "step": 364 + }, + { + "epoch": 0.1514051643679353, + "grad_norm": 2.6875, + "learning_rate": 0.00019281640820410206, + "loss": 4.9004, + "step": 365 + }, + { + "epoch": 0.1518199730374365, + "grad_norm": 2.40625, + "learning_rate": 0.00019279639819909955, + "loss": 4.6554, + "step": 366 + }, + { + "epoch": 0.1522347817069377, + "grad_norm": 2.15625, + "learning_rate": 0.00019277638819409706, + "loss": 4.3681, + "step": 367 + }, + { + "epoch": 0.15264959037643885, + "grad_norm": 2.328125, + "learning_rate": 0.00019275637818909455, + "loss": 4.3656, + "step": 368 + }, + { + "epoch": 0.15306439904594005, + "grad_norm": 2.40625, + "learning_rate": 0.00019273636818409207, + "loss": 4.5683, + "step": 369 + }, + { + "epoch": 0.15347920771544124, + "grad_norm": 2.265625, + "learning_rate": 0.00019271635817908955, + "loss": 4.7063, + "step": 370 + }, + { + "epoch": 0.15389401638494243, + "grad_norm": 2.515625, + "learning_rate": 0.00019269634817408707, + "loss": 4.7527, + "step": 371 + }, + { + "epoch": 0.15430882505444363, + "grad_norm": 2.328125, + "learning_rate": 0.00019267633816908456, + "loss": 4.6669, + "step": 372 + }, + { + "epoch": 0.15472363372394482, + "grad_norm": 2.1875, + "learning_rate": 0.00019265632816408204, + "loss": 4.5687, + "step": 373 + }, + { + "epoch": 0.155138442393446, + "grad_norm": 2.28125, + "learning_rate": 0.00019263631815907953, + "loss": 4.8767, + "step": 374 + }, + { + "epoch": 0.1555532510629472, + "grad_norm": 2.21875, + "learning_rate": 0.00019261630815407704, + "loss": 4.5858, + "step": 375 + }, + { + "epoch": 0.1559680597324484, + "grad_norm": 2.09375, + "learning_rate": 0.00019259629814907453, + "loss": 4.4694, + "step": 376 + }, + { + "epoch": 0.1563828684019496, + "grad_norm": 2.5, + "learning_rate": 0.00019257628814407205, + "loss": 4.4371, + "step": 377 + }, + { + "epoch": 0.1567976770714508, + "grad_norm": 2.171875, + "learning_rate": 0.00019255627813906953, + "loss": 4.4846, + "step": 378 + }, + { + "epoch": 0.15721248574095198, + "grad_norm": 2.328125, + "learning_rate": 0.00019253626813406705, + "loss": 4.7829, + "step": 379 + }, + { + 
"epoch": 0.15762729441045317, + "grad_norm": 2.234375, + "learning_rate": 0.00019251625812906453, + "loss": 4.7909, + "step": 380 + }, + { + "epoch": 0.15804210307995437, + "grad_norm": 2.296875, + "learning_rate": 0.00019249624812406205, + "loss": 4.349, + "step": 381 + }, + { + "epoch": 0.15845691174945556, + "grad_norm": 2.375, + "learning_rate": 0.00019247623811905954, + "loss": 4.518, + "step": 382 + }, + { + "epoch": 0.15887172041895675, + "grad_norm": 2.296875, + "learning_rate": 0.00019245622811405702, + "loss": 4.7616, + "step": 383 + }, + { + "epoch": 0.15928652908845795, + "grad_norm": 2.796875, + "learning_rate": 0.00019243621810905454, + "loss": 4.5117, + "step": 384 + }, + { + "epoch": 0.15970133775795914, + "grad_norm": 2.28125, + "learning_rate": 0.00019241620810405203, + "loss": 4.5693, + "step": 385 + }, + { + "epoch": 0.16011614642746033, + "grad_norm": 2.234375, + "learning_rate": 0.00019239619809904954, + "loss": 4.748, + "step": 386 + }, + { + "epoch": 0.16053095509696153, + "grad_norm": 2.3125, + "learning_rate": 0.00019237618809404703, + "loss": 4.6349, + "step": 387 + }, + { + "epoch": 0.16094576376646272, + "grad_norm": 2.359375, + "learning_rate": 0.00019235617808904454, + "loss": 4.6104, + "step": 388 + }, + { + "epoch": 0.1613605724359639, + "grad_norm": 2.71875, + "learning_rate": 0.00019233616808404203, + "loss": 4.6278, + "step": 389 + }, + { + "epoch": 0.1617753811054651, + "grad_norm": 2.21875, + "learning_rate": 0.00019231615807903954, + "loss": 4.6366, + "step": 390 + }, + { + "epoch": 0.1621901897749663, + "grad_norm": 2.390625, + "learning_rate": 0.00019229614807403703, + "loss": 4.6784, + "step": 391 + }, + { + "epoch": 0.1626049984444675, + "grad_norm": 2.3125, + "learning_rate": 0.00019227613806903452, + "loss": 4.5775, + "step": 392 + }, + { + "epoch": 0.16301980711396868, + "grad_norm": 2.5, + "learning_rate": 0.000192256128064032, + "loss": 4.7694, + "step": 393 + }, + { + "epoch": 0.16343461578346988, + "grad_norm": 2.4375, + "learning_rate": 0.00019223611805902952, + "loss": 4.3015, + "step": 394 + }, + { + "epoch": 0.16384942445297107, + "grad_norm": 2.25, + "learning_rate": 0.00019221610805402703, + "loss": 4.5376, + "step": 395 + }, + { + "epoch": 0.16426423312247226, + "grad_norm": 2.578125, + "learning_rate": 0.00019219609804902452, + "loss": 4.5119, + "step": 396 + }, + { + "epoch": 0.16467904179197346, + "grad_norm": 2.5625, + "learning_rate": 0.00019217608804402204, + "loss": 4.6437, + "step": 397 + }, + { + "epoch": 0.16509385046147465, + "grad_norm": 2.765625, + "learning_rate": 0.00019215607803901952, + "loss": 4.7193, + "step": 398 + }, + { + "epoch": 0.16550865913097584, + "grad_norm": 2.0625, + "learning_rate": 0.00019213606803401704, + "loss": 4.5158, + "step": 399 + }, + { + "epoch": 0.16592346780047704, + "grad_norm": 2.34375, + "learning_rate": 0.00019211605802901452, + "loss": 4.9949, + "step": 400 + }, + { + "epoch": 0.16633827646997823, + "grad_norm": 2.15625, + "learning_rate": 0.000192096048024012, + "loss": 4.7789, + "step": 401 + }, + { + "epoch": 0.16675308513947942, + "grad_norm": 2.515625, + "learning_rate": 0.0001920760380190095, + "loss": 4.6586, + "step": 402 + }, + { + "epoch": 0.16716789380898062, + "grad_norm": 2.5, + "learning_rate": 0.000192056028014007, + "loss": 4.5197, + "step": 403 + }, + { + "epoch": 0.1675827024784818, + "grad_norm": 2.484375, + "learning_rate": 0.0001920360180090045, + "loss": 4.6449, + "step": 404 + }, + { + "epoch": 0.167997511147983, + "grad_norm": 2.125, + "learning_rate": 
0.00019201600800400202, + "loss": 4.6291, + "step": 405 + }, + { + "epoch": 0.1684123198174842, + "grad_norm": 2.171875, + "learning_rate": 0.0001919959979989995, + "loss": 4.7104, + "step": 406 + }, + { + "epoch": 0.1688271284869854, + "grad_norm": 2.0625, + "learning_rate": 0.00019197598799399702, + "loss": 4.7399, + "step": 407 + }, + { + "epoch": 0.16924193715648658, + "grad_norm": 2.28125, + "learning_rate": 0.0001919559779889945, + "loss": 4.3293, + "step": 408 + }, + { + "epoch": 0.16965674582598778, + "grad_norm": 2.1875, + "learning_rate": 0.000191935967983992, + "loss": 4.4642, + "step": 409 + }, + { + "epoch": 0.17007155449548897, + "grad_norm": 2.453125, + "learning_rate": 0.0001919159579789895, + "loss": 4.5354, + "step": 410 + }, + { + "epoch": 0.17048636316499013, + "grad_norm": 2.15625, + "learning_rate": 0.000191895947973987, + "loss": 4.4258, + "step": 411 + }, + { + "epoch": 0.17090117183449133, + "grad_norm": 2.328125, + "learning_rate": 0.0001918759379689845, + "loss": 4.7604, + "step": 412 + }, + { + "epoch": 0.17131598050399252, + "grad_norm": 2.6875, + "learning_rate": 0.000191855927963982, + "loss": 4.9124, + "step": 413 + }, + { + "epoch": 0.17173078917349371, + "grad_norm": 2.0625, + "learning_rate": 0.0001918359179589795, + "loss": 4.8301, + "step": 414 + }, + { + "epoch": 0.1721455978429949, + "grad_norm": 2.296875, + "learning_rate": 0.000191815907953977, + "loss": 4.7501, + "step": 415 + }, + { + "epoch": 0.1725604065124961, + "grad_norm": 2.171875, + "learning_rate": 0.0001917958979489745, + "loss": 4.5007, + "step": 416 + }, + { + "epoch": 0.1729752151819973, + "grad_norm": 2.75, + "learning_rate": 0.000191775887943972, + "loss": 4.7086, + "step": 417 + }, + { + "epoch": 0.1733900238514985, + "grad_norm": 2.359375, + "learning_rate": 0.00019175587793896949, + "loss": 4.5778, + "step": 418 + }, + { + "epoch": 0.17380483252099968, + "grad_norm": 2.296875, + "learning_rate": 0.00019173586793396697, + "loss": 4.529, + "step": 419 + }, + { + "epoch": 0.17421964119050087, + "grad_norm": 2.53125, + "learning_rate": 0.0001917158579289645, + "loss": 4.4016, + "step": 420 + }, + { + "epoch": 0.17463444986000207, + "grad_norm": 2.1875, + "learning_rate": 0.00019169584792396197, + "loss": 4.4806, + "step": 421 + }, + { + "epoch": 0.17504925852950326, + "grad_norm": 2.453125, + "learning_rate": 0.0001916758379189595, + "loss": 4.6308, + "step": 422 + }, + { + "epoch": 0.17546406719900445, + "grad_norm": 2.28125, + "learning_rate": 0.000191655827913957, + "loss": 4.5926, + "step": 423 + }, + { + "epoch": 0.17587887586850565, + "grad_norm": 2.328125, + "learning_rate": 0.0001916358179089545, + "loss": 4.3179, + "step": 424 + }, + { + "epoch": 0.17629368453800684, + "grad_norm": 2.625, + "learning_rate": 0.000191615807903952, + "loss": 4.5057, + "step": 425 + }, + { + "epoch": 0.17670849320750803, + "grad_norm": 2.484375, + "learning_rate": 0.0001915957978989495, + "loss": 4.6446, + "step": 426 + }, + { + "epoch": 0.17712330187700923, + "grad_norm": 2.21875, + "learning_rate": 0.00019157578789394698, + "loss": 4.7132, + "step": 427 + }, + { + "epoch": 0.17753811054651042, + "grad_norm": 2.296875, + "learning_rate": 0.00019155577788894447, + "loss": 4.4011, + "step": 428 + }, + { + "epoch": 0.1779529192160116, + "grad_norm": 2.3125, + "learning_rate": 0.00019153576788394198, + "loss": 4.7595, + "step": 429 + }, + { + "epoch": 0.1783677278855128, + "grad_norm": 2.171875, + "learning_rate": 0.00019151575787893947, + "loss": 4.5743, + "step": 430 + }, + { + "epoch": 
0.178782536555014, + "grad_norm": 2.265625, + "learning_rate": 0.00019149574787393698, + "loss": 4.6784, + "step": 431 + }, + { + "epoch": 0.1791973452245152, + "grad_norm": 2.4375, + "learning_rate": 0.00019147573786893447, + "loss": 4.4957, + "step": 432 + }, + { + "epoch": 0.17961215389401639, + "grad_norm": 2.59375, + "learning_rate": 0.00019145572786393198, + "loss": 4.4072, + "step": 433 + }, + { + "epoch": 0.18002696256351758, + "grad_norm": 2.4375, + "learning_rate": 0.00019143571785892947, + "loss": 4.6275, + "step": 434 + }, + { + "epoch": 0.18044177123301877, + "grad_norm": 2.59375, + "learning_rate": 0.00019141570785392699, + "loss": 4.6934, + "step": 435 + }, + { + "epoch": 0.18085657990251996, + "grad_norm": 2.234375, + "learning_rate": 0.00019139569784892447, + "loss": 4.6939, + "step": 436 + }, + { + "epoch": 0.18127138857202116, + "grad_norm": 2.5, + "learning_rate": 0.00019137568784392196, + "loss": 4.6987, + "step": 437 + }, + { + "epoch": 0.18168619724152235, + "grad_norm": 2.421875, + "learning_rate": 0.00019135567783891947, + "loss": 4.5695, + "step": 438 + }, + { + "epoch": 0.18210100591102354, + "grad_norm": 2.171875, + "learning_rate": 0.00019133566783391696, + "loss": 4.7376, + "step": 439 + }, + { + "epoch": 0.18251581458052474, + "grad_norm": 2.640625, + "learning_rate": 0.00019131565782891448, + "loss": 4.5068, + "step": 440 + }, + { + "epoch": 0.18293062325002593, + "grad_norm": 2.734375, + "learning_rate": 0.00019129564782391196, + "loss": 4.4134, + "step": 441 + }, + { + "epoch": 0.18334543191952712, + "grad_norm": 2.453125, + "learning_rate": 0.00019127563781890948, + "loss": 4.5897, + "step": 442 + }, + { + "epoch": 0.18376024058902832, + "grad_norm": 2.15625, + "learning_rate": 0.00019125562781390697, + "loss": 4.5844, + "step": 443 + }, + { + "epoch": 0.1841750492585295, + "grad_norm": 2.328125, + "learning_rate": 0.00019123561780890445, + "loss": 4.6493, + "step": 444 + }, + { + "epoch": 0.1845898579280307, + "grad_norm": 2.515625, + "learning_rate": 0.00019121560780390194, + "loss": 4.4406, + "step": 445 + }, + { + "epoch": 0.1850046665975319, + "grad_norm": 2.34375, + "learning_rate": 0.00019119559779889945, + "loss": 4.5867, + "step": 446 + }, + { + "epoch": 0.1854194752670331, + "grad_norm": 2.453125, + "learning_rate": 0.00019117558779389694, + "loss": 4.7002, + "step": 447 + }, + { + "epoch": 0.18583428393653428, + "grad_norm": 2.703125, + "learning_rate": 0.00019115557778889446, + "loss": 4.7154, + "step": 448 + }, + { + "epoch": 0.18624909260603548, + "grad_norm": 2.25, + "learning_rate": 0.00019113556778389194, + "loss": 4.5751, + "step": 449 + }, + { + "epoch": 0.18666390127553667, + "grad_norm": 2.5, + "learning_rate": 0.00019111555777888946, + "loss": 4.448, + "step": 450 + }, + { + "epoch": 0.18707870994503786, + "grad_norm": 2.234375, + "learning_rate": 0.00019109554777388697, + "loss": 4.4763, + "step": 451 + }, + { + "epoch": 0.18749351861453906, + "grad_norm": 2.234375, + "learning_rate": 0.00019107553776888446, + "loss": 4.795, + "step": 452 + }, + { + "epoch": 0.18790832728404025, + "grad_norm": 2.203125, + "learning_rate": 0.00019105552776388195, + "loss": 4.6979, + "step": 453 + }, + { + "epoch": 0.18832313595354144, + "grad_norm": 2.421875, + "learning_rate": 0.00019103551775887943, + "loss": 4.3613, + "step": 454 + }, + { + "epoch": 0.1887379446230426, + "grad_norm": 2.203125, + "learning_rate": 0.00019101550775387695, + "loss": 4.4992, + "step": 455 + }, + { + "epoch": 0.1891527532925438, + "grad_norm": 2.703125, + 
"learning_rate": 0.00019099549774887444, + "loss": 4.6909, + "step": 456 + }, + { + "epoch": 0.189567561962045, + "grad_norm": 2.390625, + "learning_rate": 0.00019097548774387195, + "loss": 4.6363, + "step": 457 + }, + { + "epoch": 0.1899823706315462, + "grad_norm": 2.28125, + "learning_rate": 0.00019095547773886944, + "loss": 4.551, + "step": 458 + }, + { + "epoch": 0.19039717930104738, + "grad_norm": 2.40625, + "learning_rate": 0.00019093546773386695, + "loss": 4.5158, + "step": 459 + }, + { + "epoch": 0.19081198797054857, + "grad_norm": 2.578125, + "learning_rate": 0.00019091545772886444, + "loss": 4.6661, + "step": 460 + }, + { + "epoch": 0.19122679664004977, + "grad_norm": 2.3125, + "learning_rate": 0.00019089544772386195, + "loss": 4.2847, + "step": 461 + }, + { + "epoch": 0.19164160530955096, + "grad_norm": 2.328125, + "learning_rate": 0.00019087543771885944, + "loss": 4.4697, + "step": 462 + }, + { + "epoch": 0.19205641397905215, + "grad_norm": 2.078125, + "learning_rate": 0.00019085542771385693, + "loss": 4.4851, + "step": 463 + }, + { + "epoch": 0.19247122264855335, + "grad_norm": 2.015625, + "learning_rate": 0.00019083541770885444, + "loss": 4.4844, + "step": 464 + }, + { + "epoch": 0.19288603131805454, + "grad_norm": 2.203125, + "learning_rate": 0.00019081540770385193, + "loss": 4.7539, + "step": 465 + }, + { + "epoch": 0.19330083998755573, + "grad_norm": 1.9765625, + "learning_rate": 0.00019079539769884944, + "loss": 4.3977, + "step": 466 + }, + { + "epoch": 0.19371564865705693, + "grad_norm": 2.171875, + "learning_rate": 0.00019077538769384693, + "loss": 4.3886, + "step": 467 + }, + { + "epoch": 0.19413045732655812, + "grad_norm": 2.40625, + "learning_rate": 0.00019075537768884445, + "loss": 4.7598, + "step": 468 + }, + { + "epoch": 0.1945452659960593, + "grad_norm": 2.171875, + "learning_rate": 0.00019073536768384193, + "loss": 4.6188, + "step": 469 + }, + { + "epoch": 0.1949600746655605, + "grad_norm": 2.25, + "learning_rate": 0.00019071535767883945, + "loss": 4.4196, + "step": 470 + }, + { + "epoch": 0.1953748833350617, + "grad_norm": 2.046875, + "learning_rate": 0.00019069534767383693, + "loss": 4.5675, + "step": 471 + }, + { + "epoch": 0.1957896920045629, + "grad_norm": 2.265625, + "learning_rate": 0.00019067533766883442, + "loss": 4.5032, + "step": 472 + }, + { + "epoch": 0.1962045006740641, + "grad_norm": 2.46875, + "learning_rate": 0.0001906553276638319, + "loss": 4.5395, + "step": 473 + }, + { + "epoch": 0.19661930934356528, + "grad_norm": 2.46875, + "learning_rate": 0.00019063531765882942, + "loss": 4.5965, + "step": 474 + }, + { + "epoch": 0.19703411801306647, + "grad_norm": 2.40625, + "learning_rate": 0.0001906153076538269, + "loss": 4.8952, + "step": 475 + }, + { + "epoch": 0.19744892668256767, + "grad_norm": 2.234375, + "learning_rate": 0.00019059529764882443, + "loss": 4.5312, + "step": 476 + }, + { + "epoch": 0.19786373535206886, + "grad_norm": 2.046875, + "learning_rate": 0.0001905752876438219, + "loss": 4.576, + "step": 477 + }, + { + "epoch": 0.19827854402157005, + "grad_norm": 2.421875, + "learning_rate": 0.00019055527763881943, + "loss": 4.3351, + "step": 478 + }, + { + "epoch": 0.19869335269107125, + "grad_norm": 2.078125, + "learning_rate": 0.00019053526763381691, + "loss": 4.5906, + "step": 479 + }, + { + "epoch": 0.19910816136057244, + "grad_norm": 2.296875, + "learning_rate": 0.0001905152576288144, + "loss": 4.4678, + "step": 480 + }, + { + "epoch": 0.19952297003007363, + "grad_norm": 2.21875, + "learning_rate": 0.00019049524762381192, + "loss": 
4.3641, + "step": 481 + }, + { + "epoch": 0.19993777869957483, + "grad_norm": 2.328125, + "learning_rate": 0.0001904752376188094, + "loss": 4.4432, + "step": 482 + }, + { + "epoch": 0.20035258736907602, + "grad_norm": 2.390625, + "learning_rate": 0.00019045522761380692, + "loss": 4.4244, + "step": 483 + }, + { + "epoch": 0.2007673960385772, + "grad_norm": 2.21875, + "learning_rate": 0.0001904352176088044, + "loss": 4.5019, + "step": 484 + }, + { + "epoch": 0.2011822047080784, + "grad_norm": 2.34375, + "learning_rate": 0.00019041520760380192, + "loss": 4.5012, + "step": 485 + }, + { + "epoch": 0.2015970133775796, + "grad_norm": 2.203125, + "learning_rate": 0.0001903951975987994, + "loss": 4.6173, + "step": 486 + }, + { + "epoch": 0.2020118220470808, + "grad_norm": 2.59375, + "learning_rate": 0.00019037518759379692, + "loss": 4.6062, + "step": 487 + }, + { + "epoch": 0.20242663071658198, + "grad_norm": 2.109375, + "learning_rate": 0.0001903551775887944, + "loss": 4.4623, + "step": 488 + }, + { + "epoch": 0.20284143938608318, + "grad_norm": 2.15625, + "learning_rate": 0.0001903351675837919, + "loss": 4.5057, + "step": 489 + }, + { + "epoch": 0.20325624805558437, + "grad_norm": 1.984375, + "learning_rate": 0.00019031515757878938, + "loss": 4.3361, + "step": 490 + }, + { + "epoch": 0.20367105672508556, + "grad_norm": 2.21875, + "learning_rate": 0.0001902951475737869, + "loss": 4.3838, + "step": 491 + }, + { + "epoch": 0.20408586539458676, + "grad_norm": 2.3125, + "learning_rate": 0.0001902751375687844, + "loss": 4.5128, + "step": 492 + }, + { + "epoch": 0.20450067406408795, + "grad_norm": 2.515625, + "learning_rate": 0.0001902551275637819, + "loss": 4.4631, + "step": 493 + }, + { + "epoch": 0.20491548273358914, + "grad_norm": 2.328125, + "learning_rate": 0.0001902351175587794, + "loss": 4.7555, + "step": 494 + }, + { + "epoch": 0.20533029140309034, + "grad_norm": 3.078125, + "learning_rate": 0.0001902151075537769, + "loss": 4.8614, + "step": 495 + }, + { + "epoch": 0.20574510007259153, + "grad_norm": 2.1875, + "learning_rate": 0.00019019509754877441, + "loss": 4.5543, + "step": 496 + }, + { + "epoch": 0.20615990874209272, + "grad_norm": 2.4375, + "learning_rate": 0.0001901750875437719, + "loss": 4.5363, + "step": 497 + }, + { + "epoch": 0.2065747174115939, + "grad_norm": 2.28125, + "learning_rate": 0.0001901550775387694, + "loss": 4.6619, + "step": 498 + }, + { + "epoch": 0.20698952608109508, + "grad_norm": 2.265625, + "learning_rate": 0.00019013506753376688, + "loss": 4.3184, + "step": 499 + }, + { + "epoch": 0.20740433475059628, + "grad_norm": 2.28125, + "learning_rate": 0.0001901150575287644, + "loss": 4.3842, + "step": 500 + }, + { + "epoch": 0.20781914342009747, + "grad_norm": 2.375, + "learning_rate": 0.00019009504752376188, + "loss": 4.5367, + "step": 501 + }, + { + "epoch": 0.20823395208959866, + "grad_norm": 2.359375, + "learning_rate": 0.0001900750375187594, + "loss": 4.9936, + "step": 502 + }, + { + "epoch": 0.20864876075909985, + "grad_norm": 2.1875, + "learning_rate": 0.00019005502751375688, + "loss": 4.4494, + "step": 503 + }, + { + "epoch": 0.20906356942860105, + "grad_norm": 2.203125, + "learning_rate": 0.0001900350175087544, + "loss": 4.5118, + "step": 504 + }, + { + "epoch": 0.20947837809810224, + "grad_norm": 2.15625, + "learning_rate": 0.00019001500750375188, + "loss": 4.6193, + "step": 505 + }, + { + "epoch": 0.20989318676760343, + "grad_norm": 2.359375, + "learning_rate": 0.0001899949974987494, + "loss": 4.364, + "step": 506 + }, + { + "epoch": 0.21030799543710463, + 
"grad_norm": 2.234375, + "learning_rate": 0.00018997498749374688, + "loss": 4.5113, + "step": 507 + }, + { + "epoch": 0.21072280410660582, + "grad_norm": 2.125, + "learning_rate": 0.00018995497748874437, + "loss": 4.4587, + "step": 508 + }, + { + "epoch": 0.21113761277610701, + "grad_norm": 2.109375, + "learning_rate": 0.00018993496748374188, + "loss": 4.5137, + "step": 509 + }, + { + "epoch": 0.2115524214456082, + "grad_norm": 2.296875, + "learning_rate": 0.00018991495747873937, + "loss": 4.4468, + "step": 510 + }, + { + "epoch": 0.2119672301151094, + "grad_norm": 2.140625, + "learning_rate": 0.00018989494747373689, + "loss": 4.395, + "step": 511 + }, + { + "epoch": 0.2123820387846106, + "grad_norm": 2.140625, + "learning_rate": 0.00018987493746873437, + "loss": 4.3873, + "step": 512 + }, + { + "epoch": 0.2127968474541118, + "grad_norm": 2.28125, + "learning_rate": 0.0001898549274637319, + "loss": 4.3558, + "step": 513 + }, + { + "epoch": 0.21321165612361298, + "grad_norm": 2.421875, + "learning_rate": 0.00018983491745872938, + "loss": 4.548, + "step": 514 + }, + { + "epoch": 0.21362646479311417, + "grad_norm": 2.359375, + "learning_rate": 0.00018981490745372686, + "loss": 4.5822, + "step": 515 + }, + { + "epoch": 0.21404127346261537, + "grad_norm": 2.359375, + "learning_rate": 0.00018979489744872435, + "loss": 4.5558, + "step": 516 + }, + { + "epoch": 0.21445608213211656, + "grad_norm": 2.15625, + "learning_rate": 0.00018977488744372186, + "loss": 4.5747, + "step": 517 + }, + { + "epoch": 0.21487089080161775, + "grad_norm": 2.140625, + "learning_rate": 0.00018975487743871935, + "loss": 4.3788, + "step": 518 + }, + { + "epoch": 0.21528569947111895, + "grad_norm": 2.140625, + "learning_rate": 0.00018973486743371687, + "loss": 4.5583, + "step": 519 + }, + { + "epoch": 0.21570050814062014, + "grad_norm": 2.265625, + "learning_rate": 0.00018971485742871438, + "loss": 4.583, + "step": 520 + }, + { + "epoch": 0.21611531681012133, + "grad_norm": 2.359375, + "learning_rate": 0.00018969484742371187, + "loss": 4.5199, + "step": 521 + }, + { + "epoch": 0.21653012547962253, + "grad_norm": 2.03125, + "learning_rate": 0.00018967483741870938, + "loss": 4.5162, + "step": 522 + }, + { + "epoch": 0.21694493414912372, + "grad_norm": 2.140625, + "learning_rate": 0.00018965482741370687, + "loss": 4.5838, + "step": 523 + }, + { + "epoch": 0.2173597428186249, + "grad_norm": 2.265625, + "learning_rate": 0.00018963481740870436, + "loss": 4.5335, + "step": 524 + }, + { + "epoch": 0.2177745514881261, + "grad_norm": 2.484375, + "learning_rate": 0.00018961480740370184, + "loss": 4.7659, + "step": 525 + }, + { + "epoch": 0.2181893601576273, + "grad_norm": 2.328125, + "learning_rate": 0.00018959479739869936, + "loss": 4.2365, + "step": 526 + }, + { + "epoch": 0.2186041688271285, + "grad_norm": 2.15625, + "learning_rate": 0.00018957478739369685, + "loss": 4.7451, + "step": 527 + }, + { + "epoch": 0.21901897749662969, + "grad_norm": 2.34375, + "learning_rate": 0.00018955477738869436, + "loss": 4.5823, + "step": 528 + }, + { + "epoch": 0.21943378616613088, + "grad_norm": 2.1875, + "learning_rate": 0.00018953476738369185, + "loss": 4.4656, + "step": 529 + }, + { + "epoch": 0.21984859483563207, + "grad_norm": 2.078125, + "learning_rate": 0.00018951475737868936, + "loss": 4.9049, + "step": 530 + }, + { + "epoch": 0.22026340350513327, + "grad_norm": 2.1875, + "learning_rate": 0.00018949474737368685, + "loss": 4.6767, + "step": 531 + }, + { + "epoch": 0.22067821217463446, + "grad_norm": 2.046875, + "learning_rate": 
0.00018947473736868436, + "loss": 4.6346, + "step": 532 + }, + { + "epoch": 0.22109302084413565, + "grad_norm": 2.453125, + "learning_rate": 0.00018945472736368185, + "loss": 4.4386, + "step": 533 + }, + { + "epoch": 0.22150782951363684, + "grad_norm": 2.359375, + "learning_rate": 0.00018943471735867934, + "loss": 4.6902, + "step": 534 + }, + { + "epoch": 0.22192263818313804, + "grad_norm": 2.046875, + "learning_rate": 0.00018941470735367685, + "loss": 4.4004, + "step": 535 + }, + { + "epoch": 0.22233744685263923, + "grad_norm": 2.984375, + "learning_rate": 0.00018939469734867434, + "loss": 4.3807, + "step": 536 + }, + { + "epoch": 0.22275225552214042, + "grad_norm": 2.375, + "learning_rate": 0.00018937468734367185, + "loss": 4.5815, + "step": 537 + }, + { + "epoch": 0.22316706419164162, + "grad_norm": 2.265625, + "learning_rate": 0.00018935467733866934, + "loss": 4.7156, + "step": 538 + }, + { + "epoch": 0.2235818728611428, + "grad_norm": 2.171875, + "learning_rate": 0.00018933466733366686, + "loss": 4.5454, + "step": 539 + }, + { + "epoch": 0.223996681530644, + "grad_norm": 2.328125, + "learning_rate": 0.00018931465732866434, + "loss": 4.5494, + "step": 540 + }, + { + "epoch": 0.22441149020014517, + "grad_norm": 2.3125, + "learning_rate": 0.00018929464732366186, + "loss": 4.6564, + "step": 541 + }, + { + "epoch": 0.22482629886964636, + "grad_norm": 2.453125, + "learning_rate": 0.00018927463731865934, + "loss": 4.7024, + "step": 542 + }, + { + "epoch": 0.22524110753914756, + "grad_norm": 2.28125, + "learning_rate": 0.00018925462731365683, + "loss": 4.6705, + "step": 543 + }, + { + "epoch": 0.22565591620864875, + "grad_norm": 2.40625, + "learning_rate": 0.00018923461730865432, + "loss": 4.5733, + "step": 544 + }, + { + "epoch": 0.22607072487814994, + "grad_norm": 1.984375, + "learning_rate": 0.00018921460730365183, + "loss": 4.5865, + "step": 545 + }, + { + "epoch": 0.22648553354765114, + "grad_norm": 2.140625, + "learning_rate": 0.00018919459729864932, + "loss": 4.6588, + "step": 546 + }, + { + "epoch": 0.22690034221715233, + "grad_norm": 2.421875, + "learning_rate": 0.00018917458729364684, + "loss": 4.362, + "step": 547 + }, + { + "epoch": 0.22731515088665352, + "grad_norm": 2.078125, + "learning_rate": 0.00018915457728864435, + "loss": 4.434, + "step": 548 + }, + { + "epoch": 0.22772995955615472, + "grad_norm": 2.359375, + "learning_rate": 0.00018913456728364184, + "loss": 4.5605, + "step": 549 + }, + { + "epoch": 0.2281447682256559, + "grad_norm": 2.5625, + "learning_rate": 0.00018911455727863932, + "loss": 4.3094, + "step": 550 + }, + { + "epoch": 0.2285595768951571, + "grad_norm": 2.734375, + "learning_rate": 0.0001890945472736368, + "loss": 4.3491, + "step": 551 + }, + { + "epoch": 0.2289743855646583, + "grad_norm": 2.25, + "learning_rate": 0.00018907453726863433, + "loss": 4.5983, + "step": 552 + }, + { + "epoch": 0.2293891942341595, + "grad_norm": 1.9140625, + "learning_rate": 0.0001890545272636318, + "loss": 4.6936, + "step": 553 + }, + { + "epoch": 0.22980400290366068, + "grad_norm": 2.40625, + "learning_rate": 0.00018903451725862933, + "loss": 4.4533, + "step": 554 + }, + { + "epoch": 0.23021881157316187, + "grad_norm": 2.203125, + "learning_rate": 0.00018901450725362681, + "loss": 4.3935, + "step": 555 + }, + { + "epoch": 0.23063362024266307, + "grad_norm": 2.125, + "learning_rate": 0.00018899449724862433, + "loss": 4.5972, + "step": 556 + }, + { + "epoch": 0.23104842891216426, + "grad_norm": 2.4375, + "learning_rate": 0.00018897448724362182, + "loss": 4.4789, + "step": 557 + 
}, + { + "epoch": 0.23146323758166545, + "grad_norm": 2.203125, + "learning_rate": 0.00018895447723861933, + "loss": 4.5092, + "step": 558 + }, + { + "epoch": 0.23187804625116665, + "grad_norm": 2.390625, + "learning_rate": 0.00018893446723361682, + "loss": 4.5099, + "step": 559 + }, + { + "epoch": 0.23229285492066784, + "grad_norm": 2.453125, + "learning_rate": 0.0001889144572286143, + "loss": 4.5015, + "step": 560 + }, + { + "epoch": 0.23270766359016903, + "grad_norm": 2.25, + "learning_rate": 0.00018889444722361182, + "loss": 4.2881, + "step": 561 + }, + { + "epoch": 0.23312247225967023, + "grad_norm": 2.078125, + "learning_rate": 0.0001888744372186093, + "loss": 4.309, + "step": 562 + }, + { + "epoch": 0.23353728092917142, + "grad_norm": 2.296875, + "learning_rate": 0.00018885442721360682, + "loss": 4.5298, + "step": 563 + }, + { + "epoch": 0.2339520895986726, + "grad_norm": 2.125, + "learning_rate": 0.0001888344172086043, + "loss": 4.4788, + "step": 564 + }, + { + "epoch": 0.2343668982681738, + "grad_norm": 2.03125, + "learning_rate": 0.00018881440720360182, + "loss": 4.3574, + "step": 565 + }, + { + "epoch": 0.234781706937675, + "grad_norm": 2.265625, + "learning_rate": 0.0001887943971985993, + "loss": 4.5106, + "step": 566 + }, + { + "epoch": 0.2351965156071762, + "grad_norm": 2.21875, + "learning_rate": 0.00018877438719359682, + "loss": 4.6805, + "step": 567 + }, + { + "epoch": 0.2356113242766774, + "grad_norm": 2.171875, + "learning_rate": 0.0001887543771885943, + "loss": 4.4447, + "step": 568 + }, + { + "epoch": 0.23602613294617858, + "grad_norm": 2.15625, + "learning_rate": 0.0001887343671835918, + "loss": 4.4886, + "step": 569 + }, + { + "epoch": 0.23644094161567977, + "grad_norm": 2.109375, + "learning_rate": 0.0001887143571785893, + "loss": 4.5327, + "step": 570 + }, + { + "epoch": 0.23685575028518097, + "grad_norm": 2.0625, + "learning_rate": 0.0001886943471735868, + "loss": 4.4291, + "step": 571 + }, + { + "epoch": 0.23727055895468216, + "grad_norm": 2.328125, + "learning_rate": 0.0001886743371685843, + "loss": 4.4504, + "step": 572 + }, + { + "epoch": 0.23768536762418335, + "grad_norm": 2.71875, + "learning_rate": 0.0001886543271635818, + "loss": 4.5148, + "step": 573 + }, + { + "epoch": 0.23810017629368455, + "grad_norm": 2.078125, + "learning_rate": 0.0001886343171585793, + "loss": 4.6872, + "step": 574 + }, + { + "epoch": 0.23851498496318574, + "grad_norm": 2.28125, + "learning_rate": 0.0001886143071535768, + "loss": 4.3635, + "step": 575 + }, + { + "epoch": 0.23892979363268693, + "grad_norm": 2.390625, + "learning_rate": 0.00018859429714857432, + "loss": 4.5316, + "step": 576 + }, + { + "epoch": 0.23934460230218813, + "grad_norm": 2.765625, + "learning_rate": 0.0001885742871435718, + "loss": 4.5817, + "step": 577 + }, + { + "epoch": 0.23975941097168932, + "grad_norm": 2.1875, + "learning_rate": 0.0001885542771385693, + "loss": 4.6379, + "step": 578 + }, + { + "epoch": 0.2401742196411905, + "grad_norm": 2.421875, + "learning_rate": 0.00018853426713356678, + "loss": 4.5374, + "step": 579 + }, + { + "epoch": 0.2405890283106917, + "grad_norm": 2.171875, + "learning_rate": 0.0001885142571285643, + "loss": 4.2486, + "step": 580 + }, + { + "epoch": 0.2410038369801929, + "grad_norm": 2.234375, + "learning_rate": 0.00018849424712356178, + "loss": 4.395, + "step": 581 + }, + { + "epoch": 0.2414186456496941, + "grad_norm": 2.234375, + "learning_rate": 0.0001884742371185593, + "loss": 4.7067, + "step": 582 + }, + { + "epoch": 0.24183345431919528, + "grad_norm": 2.328125, + 
"learning_rate": 0.00018845422711355678, + "loss": 4.3778, + "step": 583 + }, + { + "epoch": 0.24224826298869645, + "grad_norm": 2.34375, + "learning_rate": 0.0001884342171085543, + "loss": 4.6454, + "step": 584 + }, + { + "epoch": 0.24266307165819764, + "grad_norm": 2.6875, + "learning_rate": 0.00018841420710355179, + "loss": 4.4081, + "step": 585 + }, + { + "epoch": 0.24307788032769884, + "grad_norm": 2.203125, + "learning_rate": 0.00018839419709854927, + "loss": 4.4475, + "step": 586 + }, + { + "epoch": 0.24349268899720003, + "grad_norm": 2.34375, + "learning_rate": 0.00018837418709354676, + "loss": 4.6707, + "step": 587 + }, + { + "epoch": 0.24390749766670122, + "grad_norm": 2.359375, + "learning_rate": 0.00018835417708854427, + "loss": 4.6686, + "step": 588 + }, + { + "epoch": 0.24432230633620242, + "grad_norm": 2.578125, + "learning_rate": 0.0001883341670835418, + "loss": 4.4247, + "step": 589 + }, + { + "epoch": 0.2447371150057036, + "grad_norm": 2.09375, + "learning_rate": 0.00018831415707853928, + "loss": 4.5128, + "step": 590 + }, + { + "epoch": 0.2451519236752048, + "grad_norm": 2.171875, + "learning_rate": 0.0001882941470735368, + "loss": 4.5771, + "step": 591 + }, + { + "epoch": 0.245566732344706, + "grad_norm": 2.25, + "learning_rate": 0.00018827413706853428, + "loss": 4.4879, + "step": 592 + }, + { + "epoch": 0.2459815410142072, + "grad_norm": 2.203125, + "learning_rate": 0.0001882541270635318, + "loss": 4.6341, + "step": 593 + }, + { + "epoch": 0.24639634968370838, + "grad_norm": 2.03125, + "learning_rate": 0.00018823411705852928, + "loss": 4.5878, + "step": 594 + }, + { + "epoch": 0.24681115835320958, + "grad_norm": 2.296875, + "learning_rate": 0.00018821410705352677, + "loss": 4.4684, + "step": 595 + }, + { + "epoch": 0.24722596702271077, + "grad_norm": 2.4375, + "learning_rate": 0.00018819409704852425, + "loss": 4.6463, + "step": 596 + }, + { + "epoch": 0.24764077569221196, + "grad_norm": 2.375, + "learning_rate": 0.00018817408704352177, + "loss": 4.2628, + "step": 597 + }, + { + "epoch": 0.24805558436171316, + "grad_norm": 2.03125, + "learning_rate": 0.00018815407703851926, + "loss": 4.3581, + "step": 598 + }, + { + "epoch": 0.24847039303121435, + "grad_norm": 2.140625, + "learning_rate": 0.00018813406703351677, + "loss": 4.5657, + "step": 599 + }, + { + "epoch": 0.24888520170071554, + "grad_norm": 2.328125, + "learning_rate": 0.00018811405702851426, + "loss": 4.4318, + "step": 600 + }, + { + "epoch": 0.24930001037021673, + "grad_norm": 2.140625, + "learning_rate": 0.00018809404702351177, + "loss": 4.4151, + "step": 601 + }, + { + "epoch": 0.24971481903971793, + "grad_norm": 2.171875, + "learning_rate": 0.00018807403701850926, + "loss": 4.5038, + "step": 602 + }, + { + "epoch": 0.25012962770921915, + "grad_norm": 2.3125, + "learning_rate": 0.00018805402701350677, + "loss": 4.62, + "step": 603 + }, + { + "epoch": 0.2505444363787203, + "grad_norm": 2.5, + "learning_rate": 0.00018803401700850426, + "loss": 4.5212, + "step": 604 + }, + { + "epoch": 0.25095924504822154, + "grad_norm": 2.09375, + "learning_rate": 0.00018801400700350175, + "loss": 4.4718, + "step": 605 + }, + { + "epoch": 0.2513740537177227, + "grad_norm": 1.9765625, + "learning_rate": 0.00018799399699849926, + "loss": 4.453, + "step": 606 + }, + { + "epoch": 0.25178886238722387, + "grad_norm": 2.234375, + "learning_rate": 0.00018797398699349675, + "loss": 4.3431, + "step": 607 + }, + { + "epoch": 0.2522036710567251, + "grad_norm": 2.421875, + "learning_rate": 0.00018795397698849426, + "loss": 4.5271, + 
"step": 608 + }, + { + "epoch": 0.25261847972622625, + "grad_norm": 2.03125, + "learning_rate": 0.00018793396698349175, + "loss": 4.4361, + "step": 609 + }, + { + "epoch": 0.2530332883957275, + "grad_norm": 2.515625, + "learning_rate": 0.00018791395697848927, + "loss": 4.4498, + "step": 610 + }, + { + "epoch": 0.25344809706522864, + "grad_norm": 2.078125, + "learning_rate": 0.00018789394697348675, + "loss": 4.6997, + "step": 611 + }, + { + "epoch": 0.25386290573472986, + "grad_norm": 2.25, + "learning_rate": 0.00018787393696848427, + "loss": 4.5507, + "step": 612 + }, + { + "epoch": 0.254277714404231, + "grad_norm": 2.890625, + "learning_rate": 0.00018785392696348175, + "loss": 4.6352, + "step": 613 + }, + { + "epoch": 0.25469252307373225, + "grad_norm": 2.359375, + "learning_rate": 0.00018783391695847924, + "loss": 4.5464, + "step": 614 + }, + { + "epoch": 0.2551073317432334, + "grad_norm": 2.234375, + "learning_rate": 0.00018781390695347673, + "loss": 4.6704, + "step": 615 + }, + { + "epoch": 0.25552214041273463, + "grad_norm": 2.390625, + "learning_rate": 0.00018779389694847424, + "loss": 4.578, + "step": 616 + }, + { + "epoch": 0.2559369490822358, + "grad_norm": 2.953125, + "learning_rate": 0.00018777388694347176, + "loss": 4.6426, + "step": 617 + }, + { + "epoch": 0.256351757751737, + "grad_norm": 2.265625, + "learning_rate": 0.00018775387693846925, + "loss": 4.4142, + "step": 618 + }, + { + "epoch": 0.2567665664212382, + "grad_norm": 2.359375, + "learning_rate": 0.00018773386693346676, + "loss": 4.8122, + "step": 619 + }, + { + "epoch": 0.2571813750907394, + "grad_norm": 2.25, + "learning_rate": 0.00018771385692846425, + "loss": 4.6555, + "step": 620 + }, + { + "epoch": 0.25759618376024057, + "grad_norm": 2.65625, + "learning_rate": 0.00018769384692346173, + "loss": 4.5801, + "step": 621 + }, + { + "epoch": 0.2580109924297418, + "grad_norm": 2.265625, + "learning_rate": 0.00018767383691845922, + "loss": 4.6506, + "step": 622 + }, + { + "epoch": 0.25842580109924296, + "grad_norm": 2.09375, + "learning_rate": 0.00018765382691345674, + "loss": 4.6701, + "step": 623 + }, + { + "epoch": 0.2588406097687442, + "grad_norm": 2.578125, + "learning_rate": 0.00018763381690845422, + "loss": 4.3571, + "step": 624 + }, + { + "epoch": 0.25925541843824534, + "grad_norm": 2.109375, + "learning_rate": 0.00018761380690345174, + "loss": 4.4263, + "step": 625 + }, + { + "epoch": 0.25967022710774657, + "grad_norm": 2.0625, + "learning_rate": 0.00018759379689844922, + "loss": 4.6005, + "step": 626 + }, + { + "epoch": 0.26008503577724773, + "grad_norm": 1.9296875, + "learning_rate": 0.00018757378689344674, + "loss": 4.4602, + "step": 627 + }, + { + "epoch": 0.26049984444674895, + "grad_norm": 2.265625, + "learning_rate": 0.00018755377688844423, + "loss": 4.4964, + "step": 628 + }, + { + "epoch": 0.2609146531162501, + "grad_norm": 1.9296875, + "learning_rate": 0.00018753376688344174, + "loss": 4.2681, + "step": 629 + }, + { + "epoch": 0.26132946178575134, + "grad_norm": 2.0625, + "learning_rate": 0.00018751375687843923, + "loss": 4.4756, + "step": 630 + }, + { + "epoch": 0.2617442704552525, + "grad_norm": 2.046875, + "learning_rate": 0.00018749374687343672, + "loss": 4.4144, + "step": 631 + }, + { + "epoch": 0.2621590791247537, + "grad_norm": 2.203125, + "learning_rate": 0.00018747373686843423, + "loss": 4.322, + "step": 632 + }, + { + "epoch": 0.2625738877942549, + "grad_norm": 2.15625, + "learning_rate": 0.00018745372686343172, + "loss": 4.5444, + "step": 633 + }, + { + "epoch": 0.2629886964637561, + 
"grad_norm": 2.421875, + "learning_rate": 0.00018743371685842923, + "loss": 4.5651, + "step": 634 + }, + { + "epoch": 0.2634035051332573, + "grad_norm": 2.3125, + "learning_rate": 0.00018741370685342672, + "loss": 4.6755, + "step": 635 + }, + { + "epoch": 0.2638183138027585, + "grad_norm": 2.453125, + "learning_rate": 0.00018739369684842423, + "loss": 4.5744, + "step": 636 + }, + { + "epoch": 0.26423312247225966, + "grad_norm": 2.421875, + "learning_rate": 0.00018737368684342172, + "loss": 4.5153, + "step": 637 + }, + { + "epoch": 0.2646479311417609, + "grad_norm": 2.296875, + "learning_rate": 0.00018735367683841923, + "loss": 4.6289, + "step": 638 + }, + { + "epoch": 0.26506273981126205, + "grad_norm": 2.265625, + "learning_rate": 0.00018733366683341672, + "loss": 4.4528, + "step": 639 + }, + { + "epoch": 0.26547754848076327, + "grad_norm": 2.265625, + "learning_rate": 0.0001873136568284142, + "loss": 4.7298, + "step": 640 + }, + { + "epoch": 0.26589235715026444, + "grad_norm": 2.078125, + "learning_rate": 0.0001872936468234117, + "loss": 4.3405, + "step": 641 + }, + { + "epoch": 0.26630716581976566, + "grad_norm": 2.234375, + "learning_rate": 0.0001872736368184092, + "loss": 4.6529, + "step": 642 + }, + { + "epoch": 0.2667219744892668, + "grad_norm": 2.40625, + "learning_rate": 0.0001872536268134067, + "loss": 4.3613, + "step": 643 + }, + { + "epoch": 0.26713678315876804, + "grad_norm": 2.46875, + "learning_rate": 0.0001872336168084042, + "loss": 4.8304, + "step": 644 + }, + { + "epoch": 0.2675515918282692, + "grad_norm": 2.484375, + "learning_rate": 0.00018721360680340173, + "loss": 4.5604, + "step": 645 + }, + { + "epoch": 0.26796640049777043, + "grad_norm": 2.03125, + "learning_rate": 0.00018719359679839921, + "loss": 4.5692, + "step": 646 + }, + { + "epoch": 0.2683812091672716, + "grad_norm": 2.015625, + "learning_rate": 0.00018717358679339673, + "loss": 4.2713, + "step": 647 + }, + { + "epoch": 0.2687960178367728, + "grad_norm": 2.21875, + "learning_rate": 0.00018715357678839422, + "loss": 4.2112, + "step": 648 + }, + { + "epoch": 0.269210826506274, + "grad_norm": 2.140625, + "learning_rate": 0.0001871335667833917, + "loss": 4.3731, + "step": 649 + }, + { + "epoch": 0.26962563517577515, + "grad_norm": 2.296875, + "learning_rate": 0.0001871135567783892, + "loss": 4.5213, + "step": 650 + }, + { + "epoch": 0.27004044384527637, + "grad_norm": 2.203125, + "learning_rate": 0.0001870935467733867, + "loss": 4.323, + "step": 651 + }, + { + "epoch": 0.27045525251477753, + "grad_norm": 2.375, + "learning_rate": 0.0001870735367683842, + "loss": 4.7545, + "step": 652 + }, + { + "epoch": 0.27087006118427875, + "grad_norm": 2.0625, + "learning_rate": 0.0001870535267633817, + "loss": 4.5885, + "step": 653 + }, + { + "epoch": 0.2712848698537799, + "grad_norm": 2.03125, + "learning_rate": 0.0001870335167583792, + "loss": 4.2861, + "step": 654 + }, + { + "epoch": 0.27169967852328114, + "grad_norm": 2.1875, + "learning_rate": 0.0001870135067533767, + "loss": 4.3931, + "step": 655 + }, + { + "epoch": 0.2721144871927823, + "grad_norm": 2.1875, + "learning_rate": 0.0001869934967483742, + "loss": 4.6776, + "step": 656 + }, + { + "epoch": 0.2725292958622835, + "grad_norm": 2.4375, + "learning_rate": 0.00018697348674337168, + "loss": 4.529, + "step": 657 + }, + { + "epoch": 0.2729441045317847, + "grad_norm": 2.375, + "learning_rate": 0.0001869534767383692, + "loss": 4.48, + "step": 658 + }, + { + "epoch": 0.2733589132012859, + "grad_norm": 2.484375, + "learning_rate": 0.00018693346673336668, + "loss": 
4.6314, + "step": 659 + }, + { + "epoch": 0.2737737218707871, + "grad_norm": 2.3125, + "learning_rate": 0.0001869134567283642, + "loss": 4.5495, + "step": 660 + }, + { + "epoch": 0.2741885305402883, + "grad_norm": 2.296875, + "learning_rate": 0.00018689344672336169, + "loss": 4.5729, + "step": 661 + }, + { + "epoch": 0.27460333920978947, + "grad_norm": 2.078125, + "learning_rate": 0.0001868734367183592, + "loss": 4.3969, + "step": 662 + }, + { + "epoch": 0.2750181478792907, + "grad_norm": 1.8984375, + "learning_rate": 0.0001868534267133567, + "loss": 4.5123, + "step": 663 + }, + { + "epoch": 0.27543295654879185, + "grad_norm": 2.234375, + "learning_rate": 0.0001868334167083542, + "loss": 4.2258, + "step": 664 + }, + { + "epoch": 0.2758477652182931, + "grad_norm": 1.9609375, + "learning_rate": 0.0001868134067033517, + "loss": 4.2612, + "step": 665 + }, + { + "epoch": 0.27626257388779424, + "grad_norm": 1.9921875, + "learning_rate": 0.00018679339669834918, + "loss": 4.6057, + "step": 666 + }, + { + "epoch": 0.27667738255729546, + "grad_norm": 2.296875, + "learning_rate": 0.00018677338669334666, + "loss": 4.5225, + "step": 667 + }, + { + "epoch": 0.2770921912267966, + "grad_norm": 2.203125, + "learning_rate": 0.00018675337668834418, + "loss": 4.8157, + "step": 668 + }, + { + "epoch": 0.27750699989629785, + "grad_norm": 2.171875, + "learning_rate": 0.00018673336668334167, + "loss": 4.6119, + "step": 669 + }, + { + "epoch": 0.277921808565799, + "grad_norm": 2.109375, + "learning_rate": 0.00018671335667833918, + "loss": 4.663, + "step": 670 + }, + { + "epoch": 0.27833661723530023, + "grad_norm": 1.921875, + "learning_rate": 0.00018669334667333667, + "loss": 4.3608, + "step": 671 + }, + { + "epoch": 0.2787514259048014, + "grad_norm": 2.015625, + "learning_rate": 0.00018667333666833418, + "loss": 4.4261, + "step": 672 + }, + { + "epoch": 0.2791662345743026, + "grad_norm": 2.234375, + "learning_rate": 0.0001866533266633317, + "loss": 4.2453, + "step": 673 + }, + { + "epoch": 0.2795810432438038, + "grad_norm": 2.140625, + "learning_rate": 0.00018663331665832918, + "loss": 4.6417, + "step": 674 + }, + { + "epoch": 0.279995851913305, + "grad_norm": 2.09375, + "learning_rate": 0.00018661330665332667, + "loss": 4.5089, + "step": 675 + }, + { + "epoch": 0.28041066058280617, + "grad_norm": 2.328125, + "learning_rate": 0.00018659329664832416, + "loss": 4.6126, + "step": 676 + }, + { + "epoch": 0.2808254692523074, + "grad_norm": 2.546875, + "learning_rate": 0.00018657328664332167, + "loss": 4.4829, + "step": 677 + }, + { + "epoch": 0.28124027792180856, + "grad_norm": 2.203125, + "learning_rate": 0.00018655327663831916, + "loss": 4.5772, + "step": 678 + }, + { + "epoch": 0.2816550865913098, + "grad_norm": 2.09375, + "learning_rate": 0.00018653326663331667, + "loss": 4.6509, + "step": 679 + }, + { + "epoch": 0.28206989526081094, + "grad_norm": 1.9453125, + "learning_rate": 0.00018651325662831416, + "loss": 4.586, + "step": 680 + }, + { + "epoch": 0.28248470393031216, + "grad_norm": 2.046875, + "learning_rate": 0.00018649324662331168, + "loss": 4.3017, + "step": 681 + }, + { + "epoch": 0.28289951259981333, + "grad_norm": 2.234375, + "learning_rate": 0.00018647323661830916, + "loss": 4.372, + "step": 682 + }, + { + "epoch": 0.28331432126931455, + "grad_norm": 2.328125, + "learning_rate": 0.00018645322661330668, + "loss": 4.6157, + "step": 683 + }, + { + "epoch": 0.2837291299388157, + "grad_norm": 2.53125, + "learning_rate": 0.00018643321660830414, + "loss": 4.4688, + "step": 684 + }, + { + "epoch": 
0.28414393860831694, + "grad_norm": 2.078125, + "learning_rate": 0.00018641320660330165, + "loss": 4.4668, + "step": 685 + }, + { + "epoch": 0.2845587472778181, + "grad_norm": 2.234375, + "learning_rate": 0.00018639319659829917, + "loss": 4.6026, + "step": 686 + }, + { + "epoch": 0.2849735559473193, + "grad_norm": 2.4375, + "learning_rate": 0.00018637318659329665, + "loss": 4.506, + "step": 687 + }, + { + "epoch": 0.2853883646168205, + "grad_norm": 2.25, + "learning_rate": 0.00018635317658829417, + "loss": 4.4546, + "step": 688 + }, + { + "epoch": 0.2858031732863217, + "grad_norm": 2.1875, + "learning_rate": 0.00018633316658329166, + "loss": 4.6656, + "step": 689 + }, + { + "epoch": 0.2862179819558229, + "grad_norm": 2.09375, + "learning_rate": 0.00018631315657828917, + "loss": 4.3545, + "step": 690 + }, + { + "epoch": 0.2866327906253241, + "grad_norm": 2.171875, + "learning_rate": 0.00018629314657328666, + "loss": 4.4224, + "step": 691 + }, + { + "epoch": 0.28704759929482526, + "grad_norm": 2.421875, + "learning_rate": 0.00018627313656828414, + "loss": 4.4423, + "step": 692 + }, + { + "epoch": 0.2874624079643264, + "grad_norm": 2.28125, + "learning_rate": 0.00018625312656328163, + "loss": 4.5748, + "step": 693 + }, + { + "epoch": 0.28787721663382765, + "grad_norm": 2.25, + "learning_rate": 0.00018623311655827915, + "loss": 4.4786, + "step": 694 + }, + { + "epoch": 0.2882920253033288, + "grad_norm": 2.265625, + "learning_rate": 0.00018621310655327663, + "loss": 4.29, + "step": 695 + }, + { + "epoch": 0.28870683397283003, + "grad_norm": 2.0625, + "learning_rate": 0.00018619309654827415, + "loss": 4.5001, + "step": 696 + }, + { + "epoch": 0.2891216426423312, + "grad_norm": 2.046875, + "learning_rate": 0.00018617308654327163, + "loss": 4.2712, + "step": 697 + }, + { + "epoch": 0.2895364513118324, + "grad_norm": 2.140625, + "learning_rate": 0.00018615307653826915, + "loss": 4.4952, + "step": 698 + }, + { + "epoch": 0.2899512599813336, + "grad_norm": 2.125, + "learning_rate": 0.00018613306653326664, + "loss": 4.4281, + "step": 699 + }, + { + "epoch": 0.2903660686508348, + "grad_norm": 2.265625, + "learning_rate": 0.00018611305652826415, + "loss": 4.431, + "step": 700 + }, + { + "epoch": 0.290780877320336, + "grad_norm": 2.171875, + "learning_rate": 0.00018609304652326164, + "loss": 4.5756, + "step": 701 + }, + { + "epoch": 0.2911956859898372, + "grad_norm": 2.375, + "learning_rate": 0.00018607303651825913, + "loss": 4.4734, + "step": 702 + }, + { + "epoch": 0.29161049465933836, + "grad_norm": 2.0, + "learning_rate": 0.00018605302651325664, + "loss": 4.4126, + "step": 703 + }, + { + "epoch": 0.2920253033288396, + "grad_norm": 2.75, + "learning_rate": 0.00018603301650825413, + "loss": 4.412, + "step": 704 + }, + { + "epoch": 0.29244011199834075, + "grad_norm": 2.015625, + "learning_rate": 0.00018601300650325164, + "loss": 4.3727, + "step": 705 + }, + { + "epoch": 0.29285492066784197, + "grad_norm": 2.484375, + "learning_rate": 0.00018599299649824913, + "loss": 4.5848, + "step": 706 + }, + { + "epoch": 0.29326972933734313, + "grad_norm": 2.25, + "learning_rate": 0.00018597298649324664, + "loss": 4.4878, + "step": 707 + }, + { + "epoch": 0.29368453800684435, + "grad_norm": 2.296875, + "learning_rate": 0.00018595297648824413, + "loss": 4.5346, + "step": 708 + }, + { + "epoch": 0.2940993466763455, + "grad_norm": 2.703125, + "learning_rate": 0.00018593296648324164, + "loss": 4.6369, + "step": 709 + }, + { + "epoch": 0.29451415534584674, + "grad_norm": 2.546875, + "learning_rate": 
0.00018591295647823913, + "loss": 4.4056, + "step": 710 + }, + { + "epoch": 0.2949289640153479, + "grad_norm": 2.078125, + "learning_rate": 0.00018589294647323662, + "loss": 4.4915, + "step": 711 + }, + { + "epoch": 0.2953437726848491, + "grad_norm": 2.234375, + "learning_rate": 0.0001858729364682341, + "loss": 4.4464, + "step": 712 + }, + { + "epoch": 0.2957585813543503, + "grad_norm": 2.125, + "learning_rate": 0.00018585292646323162, + "loss": 4.241, + "step": 713 + }, + { + "epoch": 0.2961733900238515, + "grad_norm": 2.265625, + "learning_rate": 0.00018583291645822914, + "loss": 4.7127, + "step": 714 + }, + { + "epoch": 0.2965881986933527, + "grad_norm": 2.171875, + "learning_rate": 0.00018581290645322662, + "loss": 4.0667, + "step": 715 + }, + { + "epoch": 0.2970030073628539, + "grad_norm": 2.078125, + "learning_rate": 0.00018579289644822414, + "loss": 4.4533, + "step": 716 + }, + { + "epoch": 0.29741781603235506, + "grad_norm": 2.265625, + "learning_rate": 0.00018577288644322162, + "loss": 4.551, + "step": 717 + }, + { + "epoch": 0.2978326247018563, + "grad_norm": 2.046875, + "learning_rate": 0.00018575287643821914, + "loss": 4.4095, + "step": 718 + }, + { + "epoch": 0.29824743337135745, + "grad_norm": 2.234375, + "learning_rate": 0.00018573286643321663, + "loss": 4.4535, + "step": 719 + }, + { + "epoch": 0.29866224204085867, + "grad_norm": 2.265625, + "learning_rate": 0.0001857128564282141, + "loss": 4.5547, + "step": 720 + }, + { + "epoch": 0.29907705071035984, + "grad_norm": 2.46875, + "learning_rate": 0.0001856928464232116, + "loss": 4.4773, + "step": 721 + }, + { + "epoch": 0.29949185937986106, + "grad_norm": 2.09375, + "learning_rate": 0.00018567283641820911, + "loss": 4.681, + "step": 722 + }, + { + "epoch": 0.2999066680493622, + "grad_norm": 2.09375, + "learning_rate": 0.0001856528264132066, + "loss": 4.7683, + "step": 723 + }, + { + "epoch": 0.30032147671886344, + "grad_norm": 2.421875, + "learning_rate": 0.00018563281640820412, + "loss": 4.4252, + "step": 724 + }, + { + "epoch": 0.3007362853883646, + "grad_norm": 2.3125, + "learning_rate": 0.0001856128064032016, + "loss": 4.5449, + "step": 725 + }, + { + "epoch": 0.30115109405786583, + "grad_norm": 2.03125, + "learning_rate": 0.00018559279639819912, + "loss": 4.7446, + "step": 726 + }, + { + "epoch": 0.301565902727367, + "grad_norm": 2.0625, + "learning_rate": 0.0001855727863931966, + "loss": 4.5691, + "step": 727 + }, + { + "epoch": 0.3019807113968682, + "grad_norm": 2.109375, + "learning_rate": 0.0001855527763881941, + "loss": 4.4674, + "step": 728 + }, + { + "epoch": 0.3023955200663694, + "grad_norm": 2.25, + "learning_rate": 0.0001855327663831916, + "loss": 4.4785, + "step": 729 + }, + { + "epoch": 0.3028103287358706, + "grad_norm": 2.296875, + "learning_rate": 0.0001855127563781891, + "loss": 4.5722, + "step": 730 + }, + { + "epoch": 0.30322513740537177, + "grad_norm": 2.328125, + "learning_rate": 0.0001854927463731866, + "loss": 4.3957, + "step": 731 + }, + { + "epoch": 0.303639946074873, + "grad_norm": 2.703125, + "learning_rate": 0.0001854727363681841, + "loss": 4.3544, + "step": 732 + }, + { + "epoch": 0.30405475474437416, + "grad_norm": 2.09375, + "learning_rate": 0.0001854527263631816, + "loss": 4.4401, + "step": 733 + }, + { + "epoch": 0.3044695634138754, + "grad_norm": 2.234375, + "learning_rate": 0.0001854327163581791, + "loss": 4.622, + "step": 734 + }, + { + "epoch": 0.30488437208337654, + "grad_norm": 1.90625, + "learning_rate": 0.0001854127063531766, + "loss": 4.6069, + "step": 735 + }, + { + "epoch": 
0.3052991807528777, + "grad_norm": 2.171875, + "learning_rate": 0.0001853926963481741, + "loss": 4.2986, + "step": 736 + }, + { + "epoch": 0.30571398942237893, + "grad_norm": 2.28125, + "learning_rate": 0.0001853726863431716, + "loss": 4.498, + "step": 737 + }, + { + "epoch": 0.3061287980918801, + "grad_norm": 2.359375, + "learning_rate": 0.00018535267633816907, + "loss": 4.2426, + "step": 738 + }, + { + "epoch": 0.3065436067613813, + "grad_norm": 2.1875, + "learning_rate": 0.0001853326663331666, + "loss": 4.8175, + "step": 739 + }, + { + "epoch": 0.3069584154308825, + "grad_norm": 2.15625, + "learning_rate": 0.00018531265632816408, + "loss": 4.4716, + "step": 740 + }, + { + "epoch": 0.3073732241003837, + "grad_norm": 1.828125, + "learning_rate": 0.0001852926463231616, + "loss": 4.2615, + "step": 741 + }, + { + "epoch": 0.30778803276988487, + "grad_norm": 2.484375, + "learning_rate": 0.0001852726363181591, + "loss": 4.5818, + "step": 742 + }, + { + "epoch": 0.3082028414393861, + "grad_norm": 2.078125, + "learning_rate": 0.0001852526263131566, + "loss": 4.5623, + "step": 743 + }, + { + "epoch": 0.30861765010888725, + "grad_norm": 2.125, + "learning_rate": 0.0001852326163081541, + "loss": 4.2151, + "step": 744 + }, + { + "epoch": 0.3090324587783885, + "grad_norm": 2.3125, + "learning_rate": 0.0001852126063031516, + "loss": 4.4879, + "step": 745 + }, + { + "epoch": 0.30944726744788964, + "grad_norm": 2.265625, + "learning_rate": 0.00018519259629814908, + "loss": 4.4464, + "step": 746 + }, + { + "epoch": 0.30986207611739086, + "grad_norm": 2.234375, + "learning_rate": 0.00018517258629314657, + "loss": 4.4975, + "step": 747 + }, + { + "epoch": 0.310276884786892, + "grad_norm": 1.921875, + "learning_rate": 0.00018515257628814408, + "loss": 4.2955, + "step": 748 + }, + { + "epoch": 0.31069169345639325, + "grad_norm": 2.40625, + "learning_rate": 0.00018513256628314157, + "loss": 4.4464, + "step": 749 + }, + { + "epoch": 0.3111065021258944, + "grad_norm": 2.0625, + "learning_rate": 0.00018511255627813908, + "loss": 4.5451, + "step": 750 + }, + { + "epoch": 0.31152131079539563, + "grad_norm": 2.125, + "learning_rate": 0.00018509254627313657, + "loss": 4.4628, + "step": 751 + }, + { + "epoch": 0.3119361194648968, + "grad_norm": 2.125, + "learning_rate": 0.00018507253626813409, + "loss": 4.624, + "step": 752 + }, + { + "epoch": 0.312350928134398, + "grad_norm": 2.15625, + "learning_rate": 0.00018505252626313157, + "loss": 4.494, + "step": 753 + }, + { + "epoch": 0.3127657368038992, + "grad_norm": 2.046875, + "learning_rate": 0.0001850325162581291, + "loss": 4.4368, + "step": 754 + }, + { + "epoch": 0.3131805454734004, + "grad_norm": 2.15625, + "learning_rate": 0.00018501250625312657, + "loss": 4.3604, + "step": 755 + }, + { + "epoch": 0.3135953541429016, + "grad_norm": 2.046875, + "learning_rate": 0.00018499249624812406, + "loss": 4.4223, + "step": 756 + }, + { + "epoch": 0.3140101628124028, + "grad_norm": 2.265625, + "learning_rate": 0.00018497248624312158, + "loss": 4.4868, + "step": 757 + }, + { + "epoch": 0.31442497148190396, + "grad_norm": 2.078125, + "learning_rate": 0.00018495247623811906, + "loss": 4.5598, + "step": 758 + }, + { + "epoch": 0.3148397801514052, + "grad_norm": 2.578125, + "learning_rate": 0.00018493246623311658, + "loss": 4.4284, + "step": 759 + }, + { + "epoch": 0.31525458882090635, + "grad_norm": 2.140625, + "learning_rate": 0.00018491245622811407, + "loss": 4.2679, + "step": 760 + }, + { + "epoch": 0.31566939749040757, + "grad_norm": 2.140625, + "learning_rate": 
0.00018489244622311158, + "loss": 4.6374, + "step": 761 + }, + { + "epoch": 0.31608420615990873, + "grad_norm": 2.0, + "learning_rate": 0.00018487243621810907, + "loss": 4.4247, + "step": 762 + }, + { + "epoch": 0.31649901482940995, + "grad_norm": 2.140625, + "learning_rate": 0.00018485242621310655, + "loss": 4.3802, + "step": 763 + }, + { + "epoch": 0.3169138234989111, + "grad_norm": 2.46875, + "learning_rate": 0.00018483241620810404, + "loss": 4.7218, + "step": 764 + }, + { + "epoch": 0.31732863216841234, + "grad_norm": 2.03125, + "learning_rate": 0.00018481240620310156, + "loss": 4.5808, + "step": 765 + }, + { + "epoch": 0.3177434408379135, + "grad_norm": 2.1875, + "learning_rate": 0.00018479239619809904, + "loss": 4.3449, + "step": 766 + }, + { + "epoch": 0.3181582495074147, + "grad_norm": 2.21875, + "learning_rate": 0.00018477238619309656, + "loss": 4.2355, + "step": 767 + }, + { + "epoch": 0.3185730581769159, + "grad_norm": 2.34375, + "learning_rate": 0.00018475237618809404, + "loss": 4.408, + "step": 768 + }, + { + "epoch": 0.3189878668464171, + "grad_norm": 3.0, + "learning_rate": 0.00018473236618309156, + "loss": 4.4664, + "step": 769 + }, + { + "epoch": 0.3194026755159183, + "grad_norm": 2.25, + "learning_rate": 0.00018471235617808907, + "loss": 4.3633, + "step": 770 + }, + { + "epoch": 0.3198174841854195, + "grad_norm": 1.984375, + "learning_rate": 0.00018469234617308656, + "loss": 4.4451, + "step": 771 + }, + { + "epoch": 0.32023229285492066, + "grad_norm": 1.8515625, + "learning_rate": 0.00018467233616808405, + "loss": 4.6828, + "step": 772 + }, + { + "epoch": 0.3206471015244219, + "grad_norm": 2.28125, + "learning_rate": 0.00018465232616308154, + "loss": 4.3011, + "step": 773 + }, + { + "epoch": 0.32106191019392305, + "grad_norm": 2.171875, + "learning_rate": 0.00018463231615807905, + "loss": 4.3755, + "step": 774 + }, + { + "epoch": 0.32147671886342427, + "grad_norm": 2.859375, + "learning_rate": 0.00018461230615307654, + "loss": 4.5426, + "step": 775 + }, + { + "epoch": 0.32189152753292544, + "grad_norm": 2.34375, + "learning_rate": 0.00018459229614807405, + "loss": 4.4585, + "step": 776 + }, + { + "epoch": 0.32230633620242666, + "grad_norm": 2.3125, + "learning_rate": 0.00018457228614307154, + "loss": 4.3362, + "step": 777 + }, + { + "epoch": 0.3227211448719278, + "grad_norm": 2.328125, + "learning_rate": 0.00018455227613806905, + "loss": 4.2984, + "step": 778 + }, + { + "epoch": 0.323135953541429, + "grad_norm": 2.125, + "learning_rate": 0.00018453226613306654, + "loss": 4.3185, + "step": 779 + }, + { + "epoch": 0.3235507622109302, + "grad_norm": 2.0625, + "learning_rate": 0.00018451225612806405, + "loss": 4.2773, + "step": 780 + }, + { + "epoch": 0.3239655708804314, + "grad_norm": 2.015625, + "learning_rate": 0.00018449224612306154, + "loss": 4.3669, + "step": 781 + }, + { + "epoch": 0.3243803795499326, + "grad_norm": 2.140625, + "learning_rate": 0.00018447223611805903, + "loss": 4.2738, + "step": 782 + }, + { + "epoch": 0.32479518821943376, + "grad_norm": 2.046875, + "learning_rate": 0.00018445222611305654, + "loss": 4.3231, + "step": 783 + }, + { + "epoch": 0.325209996888935, + "grad_norm": 2.234375, + "learning_rate": 0.00018443221610805403, + "loss": 4.6122, + "step": 784 + }, + { + "epoch": 0.32562480555843615, + "grad_norm": 2.25, + "learning_rate": 0.00018441220610305155, + "loss": 4.3661, + "step": 785 + }, + { + "epoch": 0.32603961422793737, + "grad_norm": 2.140625, + "learning_rate": 0.00018439219609804903, + "loss": 4.4894, + "step": 786 + }, + { + "epoch": 
0.32645442289743853, + "grad_norm": 2.3125, + "learning_rate": 0.00018437218609304655, + "loss": 4.7167, + "step": 787 + }, + { + "epoch": 0.32686923156693976, + "grad_norm": 1.96875, + "learning_rate": 0.00018435217608804403, + "loss": 4.4352, + "step": 788 + }, + { + "epoch": 0.3272840402364409, + "grad_norm": 2.078125, + "learning_rate": 0.00018433216608304155, + "loss": 4.2495, + "step": 789 + }, + { + "epoch": 0.32769884890594214, + "grad_norm": 1.984375, + "learning_rate": 0.000184312156078039, + "loss": 4.46, + "step": 790 + }, + { + "epoch": 0.3281136575754433, + "grad_norm": 2.203125, + "learning_rate": 0.00018429214607303652, + "loss": 4.4045, + "step": 791 + }, + { + "epoch": 0.32852846624494453, + "grad_norm": 2.09375, + "learning_rate": 0.000184272136068034, + "loss": 4.4309, + "step": 792 + }, + { + "epoch": 0.3289432749144457, + "grad_norm": 2.015625, + "learning_rate": 0.00018425212606303152, + "loss": 4.2624, + "step": 793 + }, + { + "epoch": 0.3293580835839469, + "grad_norm": 2.09375, + "learning_rate": 0.000184232116058029, + "loss": 4.6596, + "step": 794 + }, + { + "epoch": 0.3297728922534481, + "grad_norm": 2.203125, + "learning_rate": 0.00018421210605302653, + "loss": 4.6687, + "step": 795 + }, + { + "epoch": 0.3301877009229493, + "grad_norm": 2.109375, + "learning_rate": 0.00018419209604802401, + "loss": 4.4726, + "step": 796 + }, + { + "epoch": 0.33060250959245047, + "grad_norm": 2.265625, + "learning_rate": 0.00018417208604302153, + "loss": 4.7115, + "step": 797 + }, + { + "epoch": 0.3310173182619517, + "grad_norm": 2.140625, + "learning_rate": 0.00018415207603801902, + "loss": 4.5506, + "step": 798 + }, + { + "epoch": 0.33143212693145285, + "grad_norm": 2.140625, + "learning_rate": 0.0001841320660330165, + "loss": 4.2231, + "step": 799 + }, + { + "epoch": 0.3318469356009541, + "grad_norm": 2.296875, + "learning_rate": 0.00018411205602801402, + "loss": 4.351, + "step": 800 + }, + { + "epoch": 0.33226174427045524, + "grad_norm": 2.125, + "learning_rate": 0.0001840920460230115, + "loss": 4.4115, + "step": 801 + }, + { + "epoch": 0.33267655293995646, + "grad_norm": 2.328125, + "learning_rate": 0.00018407203601800902, + "loss": 4.3118, + "step": 802 + }, + { + "epoch": 0.3330913616094576, + "grad_norm": 2.234375, + "learning_rate": 0.0001840520260130065, + "loss": 4.2783, + "step": 803 + }, + { + "epoch": 0.33350617027895885, + "grad_norm": 2.078125, + "learning_rate": 0.00018403201600800402, + "loss": 4.3474, + "step": 804 + }, + { + "epoch": 0.33392097894846, + "grad_norm": 2.015625, + "learning_rate": 0.0001840120060030015, + "loss": 4.6202, + "step": 805 + }, + { + "epoch": 0.33433578761796123, + "grad_norm": 2.0, + "learning_rate": 0.00018399199599799902, + "loss": 4.5117, + "step": 806 + }, + { + "epoch": 0.3347505962874624, + "grad_norm": 2.203125, + "learning_rate": 0.0001839719859929965, + "loss": 4.5406, + "step": 807 + }, + { + "epoch": 0.3351654049569636, + "grad_norm": 2.65625, + "learning_rate": 0.000183951975987994, + "loss": 4.6213, + "step": 808 + }, + { + "epoch": 0.3355802136264648, + "grad_norm": 2.28125, + "learning_rate": 0.00018393196598299148, + "loss": 4.4354, + "step": 809 + }, + { + "epoch": 0.335995022295966, + "grad_norm": 1.8984375, + "learning_rate": 0.000183911955977989, + "loss": 4.5277, + "step": 810 + }, + { + "epoch": 0.33640983096546717, + "grad_norm": 2.125, + "learning_rate": 0.0001838919459729865, + "loss": 4.4133, + "step": 811 + }, + { + "epoch": 0.3368246396349684, + "grad_norm": 1.8359375, + "learning_rate": 
0.000183871935967984, + "loss": 4.5978, + "step": 812 + }, + { + "epoch": 0.33723944830446956, + "grad_norm": 1.96875, + "learning_rate": 0.00018385192596298151, + "loss": 4.292, + "step": 813 + }, + { + "epoch": 0.3376542569739708, + "grad_norm": 2.09375, + "learning_rate": 0.000183831915957979, + "loss": 4.4707, + "step": 814 + }, + { + "epoch": 0.33806906564347194, + "grad_norm": 2.1875, + "learning_rate": 0.00018381190595297652, + "loss": 4.3632, + "step": 815 + }, + { + "epoch": 0.33848387431297317, + "grad_norm": 2.046875, + "learning_rate": 0.000183791895947974, + "loss": 4.5217, + "step": 816 + }, + { + "epoch": 0.33889868298247433, + "grad_norm": 2.125, + "learning_rate": 0.0001837718859429715, + "loss": 4.3052, + "step": 817 + }, + { + "epoch": 0.33931349165197555, + "grad_norm": 2.28125, + "learning_rate": 0.00018375187593796898, + "loss": 4.3678, + "step": 818 + }, + { + "epoch": 0.3397283003214767, + "grad_norm": 2.25, + "learning_rate": 0.0001837318659329665, + "loss": 4.3474, + "step": 819 + }, + { + "epoch": 0.34014310899097794, + "grad_norm": 2.28125, + "learning_rate": 0.00018371185592796398, + "loss": 4.1707, + "step": 820 + }, + { + "epoch": 0.3405579176604791, + "grad_norm": 2.203125, + "learning_rate": 0.0001836918459229615, + "loss": 4.3324, + "step": 821 + }, + { + "epoch": 0.34097272632998027, + "grad_norm": 2.125, + "learning_rate": 0.00018367183591795898, + "loss": 4.3408, + "step": 822 + }, + { + "epoch": 0.3413875349994815, + "grad_norm": 2.40625, + "learning_rate": 0.0001836518259129565, + "loss": 4.4247, + "step": 823 + }, + { + "epoch": 0.34180234366898266, + "grad_norm": 2.125, + "learning_rate": 0.000183631815907954, + "loss": 4.4371, + "step": 824 + }, + { + "epoch": 0.3422171523384839, + "grad_norm": 2.5625, + "learning_rate": 0.00018361180590295147, + "loss": 4.4755, + "step": 825 + }, + { + "epoch": 0.34263196100798504, + "grad_norm": 2.0, + "learning_rate": 0.00018359179589794898, + "loss": 4.4183, + "step": 826 + }, + { + "epoch": 0.34304676967748626, + "grad_norm": 2.140625, + "learning_rate": 0.00018357178589294647, + "loss": 4.4067, + "step": 827 + }, + { + "epoch": 0.34346157834698743, + "grad_norm": 1.9375, + "learning_rate": 0.00018355177588794399, + "loss": 4.6639, + "step": 828 + }, + { + "epoch": 0.34387638701648865, + "grad_norm": 2.28125, + "learning_rate": 0.00018353176588294147, + "loss": 4.3251, + "step": 829 + }, + { + "epoch": 0.3442911956859898, + "grad_norm": 2.03125, + "learning_rate": 0.000183511755877939, + "loss": 4.6155, + "step": 830 + }, + { + "epoch": 0.34470600435549104, + "grad_norm": 2.0625, + "learning_rate": 0.00018349174587293648, + "loss": 4.5124, + "step": 831 + }, + { + "epoch": 0.3451208130249922, + "grad_norm": 1.96875, + "learning_rate": 0.000183471735867934, + "loss": 4.3172, + "step": 832 + }, + { + "epoch": 0.3455356216944934, + "grad_norm": 2.203125, + "learning_rate": 0.00018345172586293148, + "loss": 4.2967, + "step": 833 + }, + { + "epoch": 0.3459504303639946, + "grad_norm": 1.9453125, + "learning_rate": 0.00018343171585792896, + "loss": 4.3185, + "step": 834 + }, + { + "epoch": 0.3463652390334958, + "grad_norm": 2.390625, + "learning_rate": 0.00018341170585292645, + "loss": 4.4256, + "step": 835 + }, + { + "epoch": 0.346780047702997, + "grad_norm": 2.203125, + "learning_rate": 0.00018339169584792397, + "loss": 4.3809, + "step": 836 + }, + { + "epoch": 0.3471948563724982, + "grad_norm": 2.265625, + "learning_rate": 0.00018337168584292145, + "loss": 4.2845, + "step": 837 + }, + { + "epoch": 
0.34760966504199936, + "grad_norm": 2.09375, + "learning_rate": 0.00018335167583791897, + "loss": 4.5889, + "step": 838 + }, + { + "epoch": 0.3480244737115006, + "grad_norm": 2.21875, + "learning_rate": 0.00018333166583291648, + "loss": 4.5076, + "step": 839 + }, + { + "epoch": 0.34843928238100175, + "grad_norm": 2.234375, + "learning_rate": 0.00018331165582791397, + "loss": 4.4128, + "step": 840 + }, + { + "epoch": 0.34885409105050297, + "grad_norm": 2.3125, + "learning_rate": 0.00018329164582291148, + "loss": 4.6423, + "step": 841 + }, + { + "epoch": 0.34926889972000413, + "grad_norm": 2.0625, + "learning_rate": 0.00018327163581790897, + "loss": 4.2914, + "step": 842 + }, + { + "epoch": 0.34968370838950535, + "grad_norm": 2.234375, + "learning_rate": 0.00018325162581290646, + "loss": 4.4937, + "step": 843 + }, + { + "epoch": 0.3500985170590065, + "grad_norm": 2.296875, + "learning_rate": 0.00018323161580790395, + "loss": 4.3421, + "step": 844 + }, + { + "epoch": 0.35051332572850774, + "grad_norm": 2.15625, + "learning_rate": 0.00018321160580290146, + "loss": 4.8162, + "step": 845 + }, + { + "epoch": 0.3509281343980089, + "grad_norm": 2.15625, + "learning_rate": 0.00018319159579789895, + "loss": 4.3132, + "step": 846 + }, + { + "epoch": 0.3513429430675101, + "grad_norm": 1.9140625, + "learning_rate": 0.00018317158579289646, + "loss": 4.4433, + "step": 847 + }, + { + "epoch": 0.3517577517370113, + "grad_norm": 2.09375, + "learning_rate": 0.00018315157578789395, + "loss": 4.508, + "step": 848 + }, + { + "epoch": 0.3521725604065125, + "grad_norm": 2.0, + "learning_rate": 0.00018313156578289146, + "loss": 4.544, + "step": 849 + }, + { + "epoch": 0.3525873690760137, + "grad_norm": 2.3125, + "learning_rate": 0.00018311155577788895, + "loss": 4.5758, + "step": 850 + }, + { + "epoch": 0.3530021777455149, + "grad_norm": 2.109375, + "learning_rate": 0.00018309154577288646, + "loss": 4.3671, + "step": 851 + }, + { + "epoch": 0.35341698641501607, + "grad_norm": 2.1875, + "learning_rate": 0.00018307153576788395, + "loss": 4.5324, + "step": 852 + }, + { + "epoch": 0.3538317950845173, + "grad_norm": 1.9375, + "learning_rate": 0.00018305152576288144, + "loss": 4.6805, + "step": 853 + }, + { + "epoch": 0.35424660375401845, + "grad_norm": 1.9609375, + "learning_rate": 0.00018303151575787895, + "loss": 4.4482, + "step": 854 + }, + { + "epoch": 0.3546614124235197, + "grad_norm": 2.203125, + "learning_rate": 0.00018301150575287644, + "loss": 4.4146, + "step": 855 + }, + { + "epoch": 0.35507622109302084, + "grad_norm": 2.421875, + "learning_rate": 0.00018299149574787396, + "loss": 4.4826, + "step": 856 + }, + { + "epoch": 0.35549102976252206, + "grad_norm": 2.171875, + "learning_rate": 0.00018297148574287144, + "loss": 4.3543, + "step": 857 + }, + { + "epoch": 0.3559058384320232, + "grad_norm": 1.953125, + "learning_rate": 0.00018295147573786896, + "loss": 4.4718, + "step": 858 + }, + { + "epoch": 0.35632064710152445, + "grad_norm": 2.078125, + "learning_rate": 0.00018293146573286644, + "loss": 4.5553, + "step": 859 + }, + { + "epoch": 0.3567354557710256, + "grad_norm": 1.921875, + "learning_rate": 0.00018291145572786393, + "loss": 4.522, + "step": 860 + }, + { + "epoch": 0.35715026444052683, + "grad_norm": 2.03125, + "learning_rate": 0.00018289144572286142, + "loss": 4.2493, + "step": 861 + }, + { + "epoch": 0.357565073110028, + "grad_norm": 2.09375, + "learning_rate": 0.00018287143571785893, + "loss": 4.4382, + "step": 862 + }, + { + "epoch": 0.3579798817795292, + "grad_norm": 2.28125, + "learning_rate": 
0.00018285142571285642, + "loss": 4.7082, + "step": 863 + }, + { + "epoch": 0.3583946904490304, + "grad_norm": 2.0625, + "learning_rate": 0.00018283141570785393, + "loss": 4.584, + "step": 864 + }, + { + "epoch": 0.35880949911853155, + "grad_norm": 2.625, + "learning_rate": 0.00018281140570285142, + "loss": 4.4695, + "step": 865 + }, + { + "epoch": 0.35922430778803277, + "grad_norm": 2.171875, + "learning_rate": 0.00018279139569784894, + "loss": 4.3282, + "step": 866 + }, + { + "epoch": 0.35963911645753394, + "grad_norm": 1.921875, + "learning_rate": 0.00018277138569284645, + "loss": 4.6258, + "step": 867 + }, + { + "epoch": 0.36005392512703516, + "grad_norm": 2.21875, + "learning_rate": 0.00018275137568784394, + "loss": 4.6066, + "step": 868 + }, + { + "epoch": 0.3604687337965363, + "grad_norm": 1.96875, + "learning_rate": 0.00018273136568284143, + "loss": 4.382, + "step": 869 + }, + { + "epoch": 0.36088354246603754, + "grad_norm": 2.0, + "learning_rate": 0.0001827113556778389, + "loss": 4.769, + "step": 870 + }, + { + "epoch": 0.3612983511355387, + "grad_norm": 2.0, + "learning_rate": 0.00018269134567283643, + "loss": 4.2924, + "step": 871 + }, + { + "epoch": 0.36171315980503993, + "grad_norm": 1.984375, + "learning_rate": 0.00018267133566783391, + "loss": 4.5543, + "step": 872 + }, + { + "epoch": 0.3621279684745411, + "grad_norm": 1.890625, + "learning_rate": 0.00018265132566283143, + "loss": 4.3232, + "step": 873 + }, + { + "epoch": 0.3625427771440423, + "grad_norm": 2.09375, + "learning_rate": 0.00018263131565782892, + "loss": 4.2604, + "step": 874 + }, + { + "epoch": 0.3629575858135435, + "grad_norm": 2.203125, + "learning_rate": 0.00018261130565282643, + "loss": 4.3332, + "step": 875 + }, + { + "epoch": 0.3633723944830447, + "grad_norm": 2.171875, + "learning_rate": 0.00018259129564782392, + "loss": 4.3428, + "step": 876 + }, + { + "epoch": 0.36378720315254587, + "grad_norm": 2.015625, + "learning_rate": 0.00018257128564282143, + "loss": 4.2177, + "step": 877 + }, + { + "epoch": 0.3642020118220471, + "grad_norm": 2.09375, + "learning_rate": 0.00018255127563781892, + "loss": 4.4712, + "step": 878 + }, + { + "epoch": 0.36461682049154825, + "grad_norm": 2.234375, + "learning_rate": 0.0001825312656328164, + "loss": 4.5336, + "step": 879 + }, + { + "epoch": 0.3650316291610495, + "grad_norm": 2.125, + "learning_rate": 0.00018251125562781392, + "loss": 4.6374, + "step": 880 + }, + { + "epoch": 0.36544643783055064, + "grad_norm": 1.875, + "learning_rate": 0.0001824912456228114, + "loss": 4.4747, + "step": 881 + }, + { + "epoch": 0.36586124650005186, + "grad_norm": 2.28125, + "learning_rate": 0.00018247123561780892, + "loss": 4.2962, + "step": 882 + }, + { + "epoch": 0.366276055169553, + "grad_norm": 1.8515625, + "learning_rate": 0.0001824512256128064, + "loss": 4.2172, + "step": 883 + }, + { + "epoch": 0.36669086383905425, + "grad_norm": 2.0625, + "learning_rate": 0.00018243121560780392, + "loss": 4.3837, + "step": 884 + }, + { + "epoch": 0.3671056725085554, + "grad_norm": 1.9765625, + "learning_rate": 0.0001824112056028014, + "loss": 4.3049, + "step": 885 + }, + { + "epoch": 0.36752048117805663, + "grad_norm": 2.0625, + "learning_rate": 0.00018239119559779893, + "loss": 4.4653, + "step": 886 + }, + { + "epoch": 0.3679352898475578, + "grad_norm": 1.96875, + "learning_rate": 0.0001823711855927964, + "loss": 4.517, + "step": 887 + }, + { + "epoch": 0.368350098517059, + "grad_norm": 1.90625, + "learning_rate": 0.0001823511755877939, + "loss": 4.3587, + "step": 888 + }, + { + "epoch": 
0.3687649071865602, + "grad_norm": 2.015625, + "learning_rate": 0.0001823311655827914, + "loss": 4.5023, + "step": 889 + }, + { + "epoch": 0.3691797158560614, + "grad_norm": 2.109375, + "learning_rate": 0.0001823111555777889, + "loss": 4.4634, + "step": 890 + }, + { + "epoch": 0.3695945245255626, + "grad_norm": 1.875, + "learning_rate": 0.0001822911455727864, + "loss": 4.2693, + "step": 891 + }, + { + "epoch": 0.3700093331950638, + "grad_norm": 2.28125, + "learning_rate": 0.0001822711355677839, + "loss": 4.4683, + "step": 892 + }, + { + "epoch": 0.37042414186456496, + "grad_norm": 2.015625, + "learning_rate": 0.0001822511255627814, + "loss": 4.2299, + "step": 893 + }, + { + "epoch": 0.3708389505340662, + "grad_norm": 2.484375, + "learning_rate": 0.0001822311155577789, + "loss": 4.4003, + "step": 894 + }, + { + "epoch": 0.37125375920356735, + "grad_norm": 1.9453125, + "learning_rate": 0.00018221110555277642, + "loss": 4.2237, + "step": 895 + }, + { + "epoch": 0.37166856787306857, + "grad_norm": 2.09375, + "learning_rate": 0.00018219109554777388, + "loss": 4.3543, + "step": 896 + }, + { + "epoch": 0.37208337654256973, + "grad_norm": 2.203125, + "learning_rate": 0.0001821710855427714, + "loss": 4.2995, + "step": 897 + }, + { + "epoch": 0.37249818521207095, + "grad_norm": 1.8515625, + "learning_rate": 0.00018215107553776888, + "loss": 4.2978, + "step": 898 + }, + { + "epoch": 0.3729129938815721, + "grad_norm": 2.3125, + "learning_rate": 0.0001821310655327664, + "loss": 4.5876, + "step": 899 + }, + { + "epoch": 0.37332780255107334, + "grad_norm": 2.15625, + "learning_rate": 0.00018211105552776388, + "loss": 4.3065, + "step": 900 + }, + { + "epoch": 0.3737426112205745, + "grad_norm": 2.109375, + "learning_rate": 0.0001820910455227614, + "loss": 4.2268, + "step": 901 + }, + { + "epoch": 0.3741574198900757, + "grad_norm": 2.140625, + "learning_rate": 0.00018207103551775889, + "loss": 4.4453, + "step": 902 + }, + { + "epoch": 0.3745722285595769, + "grad_norm": 2.234375, + "learning_rate": 0.0001820510255127564, + "loss": 4.337, + "step": 903 + }, + { + "epoch": 0.3749870372290781, + "grad_norm": 2.03125, + "learning_rate": 0.0001820310155077539, + "loss": 4.4857, + "step": 904 + }, + { + "epoch": 0.3754018458985793, + "grad_norm": 2.265625, + "learning_rate": 0.00018201100550275137, + "loss": 4.3252, + "step": 905 + }, + { + "epoch": 0.3758166545680805, + "grad_norm": 2.015625, + "learning_rate": 0.00018199099549774886, + "loss": 4.5325, + "step": 906 + }, + { + "epoch": 0.37623146323758166, + "grad_norm": 2.5, + "learning_rate": 0.00018197098549274638, + "loss": 4.3182, + "step": 907 + }, + { + "epoch": 0.3766462719070829, + "grad_norm": 2.125, + "learning_rate": 0.0001819509754877439, + "loss": 4.5372, + "step": 908 + }, + { + "epoch": 0.37706108057658405, + "grad_norm": 2.125, + "learning_rate": 0.00018193096548274138, + "loss": 4.3708, + "step": 909 + }, + { + "epoch": 0.3774758892460852, + "grad_norm": 2.171875, + "learning_rate": 0.0001819109554777389, + "loss": 4.2993, + "step": 910 + }, + { + "epoch": 0.37789069791558644, + "grad_norm": 2.265625, + "learning_rate": 0.00018189094547273638, + "loss": 4.6262, + "step": 911 + }, + { + "epoch": 0.3783055065850876, + "grad_norm": 2.125, + "learning_rate": 0.0001818709354677339, + "loss": 4.2815, + "step": 912 + }, + { + "epoch": 0.3787203152545888, + "grad_norm": 2.046875, + "learning_rate": 0.00018185092546273138, + "loss": 4.2242, + "step": 913 + }, + { + "epoch": 0.37913512392409, + "grad_norm": 2.1875, + "learning_rate": 
0.00018183091545772887, + "loss": 4.6269, + "step": 914 + }, + { + "epoch": 0.3795499325935912, + "grad_norm": 2.15625, + "learning_rate": 0.00018181090545272636, + "loss": 4.5047, + "step": 915 + }, + { + "epoch": 0.3799647412630924, + "grad_norm": 2.203125, + "learning_rate": 0.00018179089544772387, + "loss": 4.348, + "step": 916 + }, + { + "epoch": 0.3803795499325936, + "grad_norm": 2.171875, + "learning_rate": 0.00018177088544272136, + "loss": 4.3026, + "step": 917 + }, + { + "epoch": 0.38079435860209476, + "grad_norm": 2.21875, + "learning_rate": 0.00018175087543771887, + "loss": 4.3912, + "step": 918 + }, + { + "epoch": 0.381209167271596, + "grad_norm": 2.46875, + "learning_rate": 0.00018173086543271636, + "loss": 4.3818, + "step": 919 + }, + { + "epoch": 0.38162397594109715, + "grad_norm": 1.9296875, + "learning_rate": 0.00018171085542771387, + "loss": 4.3962, + "step": 920 + }, + { + "epoch": 0.38203878461059837, + "grad_norm": 1.9765625, + "learning_rate": 0.0001816908454227114, + "loss": 4.2943, + "step": 921 + }, + { + "epoch": 0.38245359328009954, + "grad_norm": 2.125, + "learning_rate": 0.00018167083541770887, + "loss": 4.6705, + "step": 922 + }, + { + "epoch": 0.38286840194960076, + "grad_norm": 2.28125, + "learning_rate": 0.00018165082541270636, + "loss": 4.3915, + "step": 923 + }, + { + "epoch": 0.3832832106191019, + "grad_norm": 1.8828125, + "learning_rate": 0.00018163081540770385, + "loss": 4.2542, + "step": 924 + }, + { + "epoch": 0.38369801928860314, + "grad_norm": 1.953125, + "learning_rate": 0.00018161080540270136, + "loss": 4.513, + "step": 925 + }, + { + "epoch": 0.3841128279581043, + "grad_norm": 1.9765625, + "learning_rate": 0.00018159079539769885, + "loss": 4.5005, + "step": 926 + }, + { + "epoch": 0.38452763662760553, + "grad_norm": 2.015625, + "learning_rate": 0.00018157078539269637, + "loss": 4.5435, + "step": 927 + }, + { + "epoch": 0.3849424452971067, + "grad_norm": 1.96875, + "learning_rate": 0.00018155077538769385, + "loss": 4.5259, + "step": 928 + }, + { + "epoch": 0.3853572539666079, + "grad_norm": 2.140625, + "learning_rate": 0.00018153076538269137, + "loss": 4.2063, + "step": 929 + }, + { + "epoch": 0.3857720626361091, + "grad_norm": 1.9375, + "learning_rate": 0.00018151075537768885, + "loss": 4.2209, + "step": 930 + }, + { + "epoch": 0.3861868713056103, + "grad_norm": 2.03125, + "learning_rate": 0.00018149074537268634, + "loss": 4.3294, + "step": 931 + }, + { + "epoch": 0.38660167997511147, + "grad_norm": 2.046875, + "learning_rate": 0.00018147073536768383, + "loss": 4.2802, + "step": 932 + }, + { + "epoch": 0.3870164886446127, + "grad_norm": 2.234375, + "learning_rate": 0.00018145072536268134, + "loss": 4.5184, + "step": 933 + }, + { + "epoch": 0.38743129731411385, + "grad_norm": 2.140625, + "learning_rate": 0.00018143071535767883, + "loss": 4.2314, + "step": 934 + }, + { + "epoch": 0.3878461059836151, + "grad_norm": 2.015625, + "learning_rate": 0.00018141070535267634, + "loss": 4.4718, + "step": 935 + }, + { + "epoch": 0.38826091465311624, + "grad_norm": 2.015625, + "learning_rate": 0.00018139069534767386, + "loss": 4.5467, + "step": 936 + }, + { + "epoch": 0.38867572332261746, + "grad_norm": 2.421875, + "learning_rate": 0.00018137068534267135, + "loss": 4.2893, + "step": 937 + }, + { + "epoch": 0.3890905319921186, + "grad_norm": 2.28125, + "learning_rate": 0.00018135067533766886, + "loss": 4.3072, + "step": 938 + }, + { + "epoch": 0.38950534066161985, + "grad_norm": 1.96875, + "learning_rate": 0.00018133066533266635, + "loss": 4.5215, + "step": 939 
+ }, + { + "epoch": 0.389920149331121, + "grad_norm": 1.9296875, + "learning_rate": 0.00018131065532766384, + "loss": 4.2413, + "step": 940 + }, + { + "epoch": 0.39033495800062223, + "grad_norm": 2.140625, + "learning_rate": 0.00018129064532266132, + "loss": 4.638, + "step": 941 + }, + { + "epoch": 0.3907497666701234, + "grad_norm": 2.15625, + "learning_rate": 0.00018127063531765884, + "loss": 4.3446, + "step": 942 + }, + { + "epoch": 0.3911645753396246, + "grad_norm": 1.984375, + "learning_rate": 0.00018125062531265632, + "loss": 4.4265, + "step": 943 + }, + { + "epoch": 0.3915793840091258, + "grad_norm": 2.0625, + "learning_rate": 0.00018123061530765384, + "loss": 4.4044, + "step": 944 + }, + { + "epoch": 0.391994192678627, + "grad_norm": 2.203125, + "learning_rate": 0.00018121060530265133, + "loss": 4.3234, + "step": 945 + }, + { + "epoch": 0.3924090013481282, + "grad_norm": 2.09375, + "learning_rate": 0.00018119059529764884, + "loss": 4.3193, + "step": 946 + }, + { + "epoch": 0.3928238100176294, + "grad_norm": 2.234375, + "learning_rate": 0.00018117058529264633, + "loss": 4.3301, + "step": 947 + }, + { + "epoch": 0.39323861868713056, + "grad_norm": 2.265625, + "learning_rate": 0.00018115057528764384, + "loss": 4.4719, + "step": 948 + }, + { + "epoch": 0.3936534273566318, + "grad_norm": 2.140625, + "learning_rate": 0.00018113056528264133, + "loss": 4.4303, + "step": 949 + }, + { + "epoch": 0.39406823602613295, + "grad_norm": 1.9375, + "learning_rate": 0.00018111055527763882, + "loss": 4.3204, + "step": 950 + }, + { + "epoch": 0.39448304469563417, + "grad_norm": 2.0625, + "learning_rate": 0.00018109054527263633, + "loss": 4.7624, + "step": 951 + }, + { + "epoch": 0.39489785336513533, + "grad_norm": 2.046875, + "learning_rate": 0.00018107053526763382, + "loss": 4.3146, + "step": 952 + }, + { + "epoch": 0.3953126620346365, + "grad_norm": 2.140625, + "learning_rate": 0.00018105052526263133, + "loss": 4.4074, + "step": 953 + }, + { + "epoch": 0.3957274707041377, + "grad_norm": 2.125, + "learning_rate": 0.00018103051525762882, + "loss": 4.6061, + "step": 954 + }, + { + "epoch": 0.3961422793736389, + "grad_norm": 2.1875, + "learning_rate": 0.00018101050525262633, + "loss": 4.3081, + "step": 955 + }, + { + "epoch": 0.3965570880431401, + "grad_norm": 2.28125, + "learning_rate": 0.00018099049524762382, + "loss": 4.6302, + "step": 956 + }, + { + "epoch": 0.39697189671264127, + "grad_norm": 1.953125, + "learning_rate": 0.00018097048524262134, + "loss": 4.4911, + "step": 957 + }, + { + "epoch": 0.3973867053821425, + "grad_norm": 2.25, + "learning_rate": 0.00018095047523761882, + "loss": 4.2517, + "step": 958 + }, + { + "epoch": 0.39780151405164366, + "grad_norm": 1.8359375, + "learning_rate": 0.0001809304652326163, + "loss": 4.2793, + "step": 959 + }, + { + "epoch": 0.3982163227211449, + "grad_norm": 1.859375, + "learning_rate": 0.0001809104552276138, + "loss": 4.4896, + "step": 960 + }, + { + "epoch": 0.39863113139064604, + "grad_norm": 2.0625, + "learning_rate": 0.0001808904452226113, + "loss": 4.3398, + "step": 961 + }, + { + "epoch": 0.39904594006014726, + "grad_norm": 2.234375, + "learning_rate": 0.0001808704352176088, + "loss": 4.3402, + "step": 962 + }, + { + "epoch": 0.39946074872964843, + "grad_norm": 2.046875, + "learning_rate": 0.00018085042521260631, + "loss": 4.3537, + "step": 963 + }, + { + "epoch": 0.39987555739914965, + "grad_norm": 1.984375, + "learning_rate": 0.00018083041520760383, + "loss": 4.4073, + "step": 964 + }, + { + "epoch": 0.4002903660686508, + "grad_norm": 2.203125, + 
"learning_rate": 0.00018081040520260132, + "loss": 4.3917, + "step": 965 + }, + { + "epoch": 0.40070517473815204, + "grad_norm": 2.046875, + "learning_rate": 0.0001807903951975988, + "loss": 4.2131, + "step": 966 + }, + { + "epoch": 0.4011199834076532, + "grad_norm": 2.140625, + "learning_rate": 0.0001807703851925963, + "loss": 4.4373, + "step": 967 + }, + { + "epoch": 0.4015347920771544, + "grad_norm": 1.9296875, + "learning_rate": 0.0001807503751875938, + "loss": 4.756, + "step": 968 + }, + { + "epoch": 0.4019496007466556, + "grad_norm": 2.140625, + "learning_rate": 0.0001807303651825913, + "loss": 4.3509, + "step": 969 + }, + { + "epoch": 0.4023644094161568, + "grad_norm": 1.921875, + "learning_rate": 0.0001807103551775888, + "loss": 4.5064, + "step": 970 + }, + { + "epoch": 0.402779218085658, + "grad_norm": 2.296875, + "learning_rate": 0.0001806903451725863, + "loss": 4.3962, + "step": 971 + }, + { + "epoch": 0.4031940267551592, + "grad_norm": 1.9609375, + "learning_rate": 0.0001806703351675838, + "loss": 4.449, + "step": 972 + }, + { + "epoch": 0.40360883542466036, + "grad_norm": 2.203125, + "learning_rate": 0.0001806503251625813, + "loss": 4.284, + "step": 973 + }, + { + "epoch": 0.4040236440941616, + "grad_norm": 2.09375, + "learning_rate": 0.0001806303151575788, + "loss": 4.4059, + "step": 974 + }, + { + "epoch": 0.40443845276366275, + "grad_norm": 2.09375, + "learning_rate": 0.0001806103051525763, + "loss": 4.4427, + "step": 975 + }, + { + "epoch": 0.40485326143316397, + "grad_norm": 1.953125, + "learning_rate": 0.00018059029514757378, + "loss": 4.305, + "step": 976 + }, + { + "epoch": 0.40526807010266513, + "grad_norm": 1.9765625, + "learning_rate": 0.0001805702851425713, + "loss": 4.3912, + "step": 977 + }, + { + "epoch": 0.40568287877216636, + "grad_norm": 1.875, + "learning_rate": 0.00018055027513756879, + "loss": 4.2529, + "step": 978 + }, + { + "epoch": 0.4060976874416675, + "grad_norm": 2.03125, + "learning_rate": 0.0001805302651325663, + "loss": 4.262, + "step": 979 + }, + { + "epoch": 0.40651249611116874, + "grad_norm": 1.8671875, + "learning_rate": 0.0001805102551275638, + "loss": 4.45, + "step": 980 + }, + { + "epoch": 0.4069273047806699, + "grad_norm": 2.09375, + "learning_rate": 0.0001804902451225613, + "loss": 4.4505, + "step": 981 + }, + { + "epoch": 0.40734211345017113, + "grad_norm": 2.046875, + "learning_rate": 0.0001804702351175588, + "loss": 4.3363, + "step": 982 + }, + { + "epoch": 0.4077569221196723, + "grad_norm": 2.203125, + "learning_rate": 0.0001804502251125563, + "loss": 4.4308, + "step": 983 + }, + { + "epoch": 0.4081717307891735, + "grad_norm": 1.9609375, + "learning_rate": 0.0001804302151075538, + "loss": 4.7916, + "step": 984 + }, + { + "epoch": 0.4085865394586747, + "grad_norm": 1.9375, + "learning_rate": 0.00018041020510255128, + "loss": 4.2049, + "step": 985 + }, + { + "epoch": 0.4090013481281759, + "grad_norm": 2.078125, + "learning_rate": 0.00018039019509754877, + "loss": 4.3206, + "step": 986 + }, + { + "epoch": 0.40941615679767707, + "grad_norm": 2.03125, + "learning_rate": 0.00018037018509254628, + "loss": 4.4308, + "step": 987 + }, + { + "epoch": 0.4098309654671783, + "grad_norm": 2.0, + "learning_rate": 0.00018035017508754377, + "loss": 4.2344, + "step": 988 + }, + { + "epoch": 0.41024577413667945, + "grad_norm": 1.8984375, + "learning_rate": 0.00018033016508254128, + "loss": 4.368, + "step": 989 + }, + { + "epoch": 0.4106605828061807, + "grad_norm": 2.125, + "learning_rate": 0.00018031015507753877, + "loss": 4.6057, + "step": 990 + }, + { 
+ "epoch": 0.41107539147568184, + "grad_norm": 2.140625, + "learning_rate": 0.00018029014507253628, + "loss": 4.1587, + "step": 991 + }, + { + "epoch": 0.41149020014518306, + "grad_norm": 2.234375, + "learning_rate": 0.0001802701350675338, + "loss": 4.3412, + "step": 992 + }, + { + "epoch": 0.4119050088146842, + "grad_norm": 2.109375, + "learning_rate": 0.00018025012506253128, + "loss": 4.5148, + "step": 993 + }, + { + "epoch": 0.41231981748418545, + "grad_norm": 1.90625, + "learning_rate": 0.00018023011505752877, + "loss": 4.2939, + "step": 994 + }, + { + "epoch": 0.4127346261536866, + "grad_norm": 1.8984375, + "learning_rate": 0.00018021010505252626, + "loss": 4.3407, + "step": 995 + }, + { + "epoch": 0.4131494348231878, + "grad_norm": 1.9609375, + "learning_rate": 0.00018019009504752377, + "loss": 4.3568, + "step": 996 + }, + { + "epoch": 0.413564243492689, + "grad_norm": 1.8828125, + "learning_rate": 0.00018017008504252126, + "loss": 4.1772, + "step": 997 + }, + { + "epoch": 0.41397905216219016, + "grad_norm": 1.8984375, + "learning_rate": 0.00018015007503751878, + "loss": 4.1354, + "step": 998 + }, + { + "epoch": 0.4143938608316914, + "grad_norm": 2.09375, + "learning_rate": 0.00018013006503251626, + "loss": 4.259, + "step": 999 + }, + { + "epoch": 0.41480866950119255, + "grad_norm": 2.046875, + "learning_rate": 0.00018011005502751378, + "loss": 4.3353, + "step": 1000 + }, + { + "epoch": 0.41522347817069377, + "grad_norm": 1.859375, + "learning_rate": 0.00018009004502251126, + "loss": 4.305, + "step": 1001 + }, + { + "epoch": 0.41563828684019494, + "grad_norm": 1.9921875, + "learning_rate": 0.00018007003501750875, + "loss": 4.4013, + "step": 1002 + }, + { + "epoch": 0.41605309550969616, + "grad_norm": 2.1875, + "learning_rate": 0.00018005002501250624, + "loss": 4.339, + "step": 1003 + }, + { + "epoch": 0.4164679041791973, + "grad_norm": 2.0625, + "learning_rate": 0.00018003001500750375, + "loss": 4.3102, + "step": 1004 + }, + { + "epoch": 0.41688271284869854, + "grad_norm": 2.0, + "learning_rate": 0.00018001000500250127, + "loss": 4.2317, + "step": 1005 + }, + { + "epoch": 0.4172975215181997, + "grad_norm": 1.953125, + "learning_rate": 0.00017998999499749875, + "loss": 4.2712, + "step": 1006 + }, + { + "epoch": 0.41771233018770093, + "grad_norm": 2.140625, + "learning_rate": 0.00017996998499249627, + "loss": 4.3149, + "step": 1007 + }, + { + "epoch": 0.4181271388572021, + "grad_norm": 2.203125, + "learning_rate": 0.00017994997498749376, + "loss": 4.2377, + "step": 1008 + }, + { + "epoch": 0.4185419475267033, + "grad_norm": 2.015625, + "learning_rate": 0.00017992996498249127, + "loss": 4.7052, + "step": 1009 + }, + { + "epoch": 0.4189567561962045, + "grad_norm": 1.9609375, + "learning_rate": 0.00017990995497748876, + "loss": 4.1802, + "step": 1010 + }, + { + "epoch": 0.4193715648657057, + "grad_norm": 1.8984375, + "learning_rate": 0.00017988994497248625, + "loss": 4.6006, + "step": 1011 + }, + { + "epoch": 0.41978637353520687, + "grad_norm": 2.03125, + "learning_rate": 0.00017986993496748373, + "loss": 4.5496, + "step": 1012 + }, + { + "epoch": 0.4202011822047081, + "grad_norm": 2.046875, + "learning_rate": 0.00017984992496248125, + "loss": 4.3438, + "step": 1013 + }, + { + "epoch": 0.42061599087420926, + "grad_norm": 2.171875, + "learning_rate": 0.00017982991495747873, + "loss": 4.3297, + "step": 1014 + }, + { + "epoch": 0.4210307995437105, + "grad_norm": 2.015625, + "learning_rate": 0.00017980990495247625, + "loss": 4.4914, + "step": 1015 + }, + { + "epoch": 0.42144560821321164, + 
"grad_norm": 1.9296875, + "learning_rate": 0.00017978989494747374, + "loss": 4.2198, + "step": 1016 + }, + { + "epoch": 0.42186041688271286, + "grad_norm": 2.125, + "learning_rate": 0.00017976988494247125, + "loss": 4.7153, + "step": 1017 + }, + { + "epoch": 0.42227522555221403, + "grad_norm": 2.046875, + "learning_rate": 0.00017974987493746876, + "loss": 4.4418, + "step": 1018 + }, + { + "epoch": 0.42269003422171525, + "grad_norm": 1.9765625, + "learning_rate": 0.00017972986493246625, + "loss": 4.1992, + "step": 1019 + }, + { + "epoch": 0.4231048428912164, + "grad_norm": 2.109375, + "learning_rate": 0.00017970985492746374, + "loss": 4.2828, + "step": 1020 + }, + { + "epoch": 0.42351965156071764, + "grad_norm": 2.203125, + "learning_rate": 0.00017968984492246123, + "loss": 4.5623, + "step": 1021 + }, + { + "epoch": 0.4239344602302188, + "grad_norm": 2.171875, + "learning_rate": 0.00017966983491745874, + "loss": 4.2724, + "step": 1022 + }, + { + "epoch": 0.42434926889972, + "grad_norm": 2.0625, + "learning_rate": 0.00017964982491245623, + "loss": 4.6182, + "step": 1023 + }, + { + "epoch": 0.4247640775692212, + "grad_norm": 2.09375, + "learning_rate": 0.00017962981490745374, + "loss": 4.2161, + "step": 1024 + }, + { + "epoch": 0.4251788862387224, + "grad_norm": 2.03125, + "learning_rate": 0.00017960980490245123, + "loss": 4.3507, + "step": 1025 + }, + { + "epoch": 0.4255936949082236, + "grad_norm": 2.15625, + "learning_rate": 0.00017958979489744874, + "loss": 4.3925, + "step": 1026 + }, + { + "epoch": 0.4260085035777248, + "grad_norm": 1.90625, + "learning_rate": 0.00017956978489244623, + "loss": 4.4245, + "step": 1027 + }, + { + "epoch": 0.42642331224722596, + "grad_norm": 1.9921875, + "learning_rate": 0.00017954977488744375, + "loss": 4.5282, + "step": 1028 + }, + { + "epoch": 0.4268381209167272, + "grad_norm": 1.9765625, + "learning_rate": 0.00017952976488244123, + "loss": 4.3807, + "step": 1029 + }, + { + "epoch": 0.42725292958622835, + "grad_norm": 2.0625, + "learning_rate": 0.00017950975487743872, + "loss": 4.4721, + "step": 1030 + }, + { + "epoch": 0.42766773825572957, + "grad_norm": 1.9921875, + "learning_rate": 0.0001794897448724362, + "loss": 4.3272, + "step": 1031 + }, + { + "epoch": 0.42808254692523073, + "grad_norm": 2.078125, + "learning_rate": 0.00017946973486743372, + "loss": 4.4598, + "step": 1032 + }, + { + "epoch": 0.42849735559473195, + "grad_norm": 1.984375, + "learning_rate": 0.00017944972486243124, + "loss": 4.299, + "step": 1033 + }, + { + "epoch": 0.4289121642642331, + "grad_norm": 2.28125, + "learning_rate": 0.00017942971485742872, + "loss": 4.4141, + "step": 1034 + }, + { + "epoch": 0.42932697293373434, + "grad_norm": 2.03125, + "learning_rate": 0.00017940970485242624, + "loss": 4.2519, + "step": 1035 + }, + { + "epoch": 0.4297417816032355, + "grad_norm": 2.078125, + "learning_rate": 0.00017938969484742373, + "loss": 4.3656, + "step": 1036 + }, + { + "epoch": 0.4301565902727367, + "grad_norm": 2.421875, + "learning_rate": 0.0001793696848424212, + "loss": 4.6236, + "step": 1037 + }, + { + "epoch": 0.4305713989422379, + "grad_norm": 2.375, + "learning_rate": 0.0001793496748374187, + "loss": 4.275, + "step": 1038 + }, + { + "epoch": 0.43098620761173906, + "grad_norm": 1.9140625, + "learning_rate": 0.00017932966483241621, + "loss": 4.1828, + "step": 1039 + }, + { + "epoch": 0.4314010162812403, + "grad_norm": 1.9921875, + "learning_rate": 0.0001793096548274137, + "loss": 4.4596, + "step": 1040 + }, + { + "epoch": 0.43181582495074144, + "grad_norm": 2.15625, + 
"learning_rate": 0.00017928964482241122, + "loss": 4.4108, + "step": 1041 + }, + { + "epoch": 0.43223063362024267, + "grad_norm": 2.09375, + "learning_rate": 0.0001792696348174087, + "loss": 4.5056, + "step": 1042 + }, + { + "epoch": 0.43264544228974383, + "grad_norm": 2.078125, + "learning_rate": 0.00017924962481240622, + "loss": 4.3782, + "step": 1043 + }, + { + "epoch": 0.43306025095924505, + "grad_norm": 2.1875, + "learning_rate": 0.0001792296148074037, + "loss": 4.1952, + "step": 1044 + }, + { + "epoch": 0.4334750596287462, + "grad_norm": 1.9453125, + "learning_rate": 0.00017920960480240122, + "loss": 4.2819, + "step": 1045 + }, + { + "epoch": 0.43388986829824744, + "grad_norm": 2.09375, + "learning_rate": 0.0001791895947973987, + "loss": 4.26, + "step": 1046 + }, + { + "epoch": 0.4343046769677486, + "grad_norm": 2.046875, + "learning_rate": 0.0001791695847923962, + "loss": 4.4574, + "step": 1047 + }, + { + "epoch": 0.4347194856372498, + "grad_norm": 2.046875, + "learning_rate": 0.0001791495747873937, + "loss": 4.2924, + "step": 1048 + }, + { + "epoch": 0.435134294306751, + "grad_norm": 2.0, + "learning_rate": 0.0001791295647823912, + "loss": 4.1717, + "step": 1049 + }, + { + "epoch": 0.4355491029762522, + "grad_norm": 2.21875, + "learning_rate": 0.0001791095547773887, + "loss": 4.3903, + "step": 1050 + }, + { + "epoch": 0.4359639116457534, + "grad_norm": 2.09375, + "learning_rate": 0.0001790895447723862, + "loss": 4.2457, + "step": 1051 + }, + { + "epoch": 0.4363787203152546, + "grad_norm": 2.25, + "learning_rate": 0.0001790695347673837, + "loss": 4.597, + "step": 1052 + }, + { + "epoch": 0.43679352898475576, + "grad_norm": 2.140625, + "learning_rate": 0.0001790495247623812, + "loss": 4.5183, + "step": 1053 + }, + { + "epoch": 0.437208337654257, + "grad_norm": 2.1875, + "learning_rate": 0.00017902951475737871, + "loss": 4.2368, + "step": 1054 + }, + { + "epoch": 0.43762314632375815, + "grad_norm": 1.9453125, + "learning_rate": 0.0001790095047523762, + "loss": 4.271, + "step": 1055 + }, + { + "epoch": 0.43803795499325937, + "grad_norm": 2.203125, + "learning_rate": 0.0001789894947473737, + "loss": 4.3001, + "step": 1056 + }, + { + "epoch": 0.43845276366276054, + "grad_norm": 2.046875, + "learning_rate": 0.00017896948474237118, + "loss": 4.5858, + "step": 1057 + }, + { + "epoch": 0.43886757233226176, + "grad_norm": 2.1875, + "learning_rate": 0.0001789494747373687, + "loss": 4.5801, + "step": 1058 + }, + { + "epoch": 0.4392823810017629, + "grad_norm": 2.140625, + "learning_rate": 0.00017892946473236618, + "loss": 4.5272, + "step": 1059 + }, + { + "epoch": 0.43969718967126414, + "grad_norm": 2.09375, + "learning_rate": 0.0001789094547273637, + "loss": 4.7148, + "step": 1060 + }, + { + "epoch": 0.4401119983407653, + "grad_norm": 1.9921875, + "learning_rate": 0.0001788894447223612, + "loss": 4.4599, + "step": 1061 + }, + { + "epoch": 0.44052680701026653, + "grad_norm": 1.96875, + "learning_rate": 0.0001788694347173587, + "loss": 4.2887, + "step": 1062 + }, + { + "epoch": 0.4409416156797677, + "grad_norm": 2.03125, + "learning_rate": 0.0001788494247123562, + "loss": 4.5133, + "step": 1063 + }, + { + "epoch": 0.4413564243492689, + "grad_norm": 2.03125, + "learning_rate": 0.0001788294147073537, + "loss": 4.1968, + "step": 1064 + }, + { + "epoch": 0.4417712330187701, + "grad_norm": 2.5625, + "learning_rate": 0.00017880940470235118, + "loss": 4.5081, + "step": 1065 + }, + { + "epoch": 0.4421860416882713, + "grad_norm": 2.28125, + "learning_rate": 0.00017878939469734867, + "loss": 4.2804, + 
"step": 1066 + }, + { + "epoch": 0.44260085035777247, + "grad_norm": 1.984375, + "learning_rate": 0.00017876938469234618, + "loss": 4.2879, + "step": 1067 + }, + { + "epoch": 0.4430156590272737, + "grad_norm": 2.15625, + "learning_rate": 0.00017874937468734367, + "loss": 4.3384, + "step": 1068 + }, + { + "epoch": 0.44343046769677485, + "grad_norm": 2.421875, + "learning_rate": 0.00017872936468234119, + "loss": 4.4903, + "step": 1069 + }, + { + "epoch": 0.4438452763662761, + "grad_norm": 2.140625, + "learning_rate": 0.00017870935467733867, + "loss": 4.4126, + "step": 1070 + }, + { + "epoch": 0.44426008503577724, + "grad_norm": 2.125, + "learning_rate": 0.0001786893446723362, + "loss": 4.5079, + "step": 1071 + }, + { + "epoch": 0.44467489370527846, + "grad_norm": 2.0625, + "learning_rate": 0.00017866933466733367, + "loss": 4.3872, + "step": 1072 + }, + { + "epoch": 0.4450897023747796, + "grad_norm": 1.984375, + "learning_rate": 0.00017864932466233116, + "loss": 4.7223, + "step": 1073 + }, + { + "epoch": 0.44550451104428085, + "grad_norm": 2.171875, + "learning_rate": 0.00017862931465732868, + "loss": 4.2778, + "step": 1074 + }, + { + "epoch": 0.445919319713782, + "grad_norm": 1.90625, + "learning_rate": 0.00017860930465232616, + "loss": 4.4252, + "step": 1075 + }, + { + "epoch": 0.44633412838328324, + "grad_norm": 2.125, + "learning_rate": 0.00017858929464732368, + "loss": 4.5733, + "step": 1076 + }, + { + "epoch": 0.4467489370527844, + "grad_norm": 2.296875, + "learning_rate": 0.00017856928464232117, + "loss": 4.4882, + "step": 1077 + }, + { + "epoch": 0.4471637457222856, + "grad_norm": 1.875, + "learning_rate": 0.00017854927463731868, + "loss": 4.337, + "step": 1078 + }, + { + "epoch": 0.4475785543917868, + "grad_norm": 1.8828125, + "learning_rate": 0.00017852926463231617, + "loss": 4.1409, + "step": 1079 + }, + { + "epoch": 0.447993363061288, + "grad_norm": 2.109375, + "learning_rate": 0.00017850925462731368, + "loss": 4.4273, + "step": 1080 + }, + { + "epoch": 0.4484081717307892, + "grad_norm": 2.28125, + "learning_rate": 0.00017848924462231117, + "loss": 4.3459, + "step": 1081 + }, + { + "epoch": 0.44882298040029034, + "grad_norm": 2.078125, + "learning_rate": 0.00017846923461730866, + "loss": 4.4212, + "step": 1082 + }, + { + "epoch": 0.44923778906979156, + "grad_norm": 2.09375, + "learning_rate": 0.00017844922461230614, + "loss": 4.6168, + "step": 1083 + }, + { + "epoch": 0.4496525977392927, + "grad_norm": 1.828125, + "learning_rate": 0.00017842921460730366, + "loss": 4.4838, + "step": 1084 + }, + { + "epoch": 0.45006740640879395, + "grad_norm": 1.8671875, + "learning_rate": 0.00017840920460230114, + "loss": 4.4301, + "step": 1085 + }, + { + "epoch": 0.4504822150782951, + "grad_norm": 1.8984375, + "learning_rate": 0.00017838919459729866, + "loss": 4.4215, + "step": 1086 + }, + { + "epoch": 0.45089702374779633, + "grad_norm": 2.03125, + "learning_rate": 0.00017836918459229615, + "loss": 4.3412, + "step": 1087 + }, + { + "epoch": 0.4513118324172975, + "grad_norm": 1.8046875, + "learning_rate": 0.00017834917458729366, + "loss": 4.3783, + "step": 1088 + }, + { + "epoch": 0.4517266410867987, + "grad_norm": 2.328125, + "learning_rate": 0.00017832916458229117, + "loss": 4.6112, + "step": 1089 + }, + { + "epoch": 0.4521414497562999, + "grad_norm": 2.140625, + "learning_rate": 0.00017830915457728866, + "loss": 4.3509, + "step": 1090 + }, + { + "epoch": 0.4525562584258011, + "grad_norm": 2.078125, + "learning_rate": 0.00017828914457228615, + "loss": 4.383, + "step": 1091 + }, + { + "epoch": 
0.45297106709530227, + "grad_norm": 1.9140625, + "learning_rate": 0.00017826913456728364, + "loss": 4.2993, + "step": 1092 + }, + { + "epoch": 0.4533858757648035, + "grad_norm": 1.953125, + "learning_rate": 0.00017824912456228115, + "loss": 4.2683, + "step": 1093 + }, + { + "epoch": 0.45380068443430466, + "grad_norm": 1.8984375, + "learning_rate": 0.00017822911455727864, + "loss": 4.5543, + "step": 1094 + }, + { + "epoch": 0.4542154931038059, + "grad_norm": 1.9296875, + "learning_rate": 0.00017820910455227615, + "loss": 4.344, + "step": 1095 + }, + { + "epoch": 0.45463030177330704, + "grad_norm": 1.984375, + "learning_rate": 0.00017818909454727364, + "loss": 4.7335, + "step": 1096 + }, + { + "epoch": 0.45504511044280826, + "grad_norm": 2.171875, + "learning_rate": 0.00017816908454227115, + "loss": 4.3833, + "step": 1097 + }, + { + "epoch": 0.45545991911230943, + "grad_norm": 1.8203125, + "learning_rate": 0.00017814907453726864, + "loss": 4.5725, + "step": 1098 + }, + { + "epoch": 0.45587472778181065, + "grad_norm": 2.015625, + "learning_rate": 0.00017812906453226616, + "loss": 4.5028, + "step": 1099 + }, + { + "epoch": 0.4562895364513118, + "grad_norm": 1.9765625, + "learning_rate": 0.00017810905452726364, + "loss": 4.2995, + "step": 1100 + }, + { + "epoch": 0.45670434512081304, + "grad_norm": 1.9765625, + "learning_rate": 0.00017808904452226113, + "loss": 4.2497, + "step": 1101 + }, + { + "epoch": 0.4571191537903142, + "grad_norm": 1.9453125, + "learning_rate": 0.00017806903451725865, + "loss": 4.4419, + "step": 1102 + }, + { + "epoch": 0.4575339624598154, + "grad_norm": 1.859375, + "learning_rate": 0.00017804902451225613, + "loss": 4.4091, + "step": 1103 + }, + { + "epoch": 0.4579487711293166, + "grad_norm": 1.875, + "learning_rate": 0.00017802901450725365, + "loss": 4.2748, + "step": 1104 + }, + { + "epoch": 0.4583635797988178, + "grad_norm": 1.921875, + "learning_rate": 0.00017800900450225113, + "loss": 4.5101, + "step": 1105 + }, + { + "epoch": 0.458778388468319, + "grad_norm": 2.21875, + "learning_rate": 0.00017798899449724865, + "loss": 4.3646, + "step": 1106 + }, + { + "epoch": 0.4591931971378202, + "grad_norm": 2.078125, + "learning_rate": 0.00017796898449224614, + "loss": 4.4547, + "step": 1107 + }, + { + "epoch": 0.45960800580732136, + "grad_norm": 2.046875, + "learning_rate": 0.00017794897448724362, + "loss": 4.3562, + "step": 1108 + }, + { + "epoch": 0.4600228144768226, + "grad_norm": 2.109375, + "learning_rate": 0.0001779289644822411, + "loss": 4.1776, + "step": 1109 + }, + { + "epoch": 0.46043762314632375, + "grad_norm": 1.78125, + "learning_rate": 0.00017790895447723862, + "loss": 4.2128, + "step": 1110 + }, + { + "epoch": 0.46085243181582497, + "grad_norm": 2.046875, + "learning_rate": 0.0001778889444722361, + "loss": 4.2559, + "step": 1111 + }, + { + "epoch": 0.46126724048532614, + "grad_norm": 1.859375, + "learning_rate": 0.00017786893446723363, + "loss": 4.4851, + "step": 1112 + }, + { + "epoch": 0.46168204915482736, + "grad_norm": 2.375, + "learning_rate": 0.00017784892446223111, + "loss": 4.4802, + "step": 1113 + }, + { + "epoch": 0.4620968578243285, + "grad_norm": 1.9921875, + "learning_rate": 0.00017782891445722863, + "loss": 4.2383, + "step": 1114 + }, + { + "epoch": 0.46251166649382974, + "grad_norm": 2.046875, + "learning_rate": 0.00017780890445222612, + "loss": 4.5444, + "step": 1115 + }, + { + "epoch": 0.4629264751633309, + "grad_norm": 2.0, + "learning_rate": 0.00017778889444722363, + "loss": 4.4736, + "step": 1116 + }, + { + "epoch": 0.46334128383283213, + 
"grad_norm": 2.234375, + "learning_rate": 0.00017776888444222112, + "loss": 4.37, + "step": 1117 + }, + { + "epoch": 0.4637560925023333, + "grad_norm": 1.921875, + "learning_rate": 0.0001777488744372186, + "loss": 4.3285, + "step": 1118 + }, + { + "epoch": 0.4641709011718345, + "grad_norm": 2.0625, + "learning_rate": 0.00017772886443221612, + "loss": 4.3134, + "step": 1119 + }, + { + "epoch": 0.4645857098413357, + "grad_norm": 2.0, + "learning_rate": 0.0001777088544272136, + "loss": 4.3374, + "step": 1120 + }, + { + "epoch": 0.4650005185108369, + "grad_norm": 1.7890625, + "learning_rate": 0.00017768884442221112, + "loss": 4.2235, + "step": 1121 + }, + { + "epoch": 0.46541532718033807, + "grad_norm": 2.59375, + "learning_rate": 0.0001776688344172086, + "loss": 4.0565, + "step": 1122 + }, + { + "epoch": 0.4658301358498393, + "grad_norm": 1.859375, + "learning_rate": 0.00017764882441220612, + "loss": 4.6, + "step": 1123 + }, + { + "epoch": 0.46624494451934045, + "grad_norm": 1.8828125, + "learning_rate": 0.0001776288144072036, + "loss": 4.2149, + "step": 1124 + }, + { + "epoch": 0.4666597531888416, + "grad_norm": 2.140625, + "learning_rate": 0.00017760880440220112, + "loss": 4.4413, + "step": 1125 + }, + { + "epoch": 0.46707456185834284, + "grad_norm": 1.7734375, + "learning_rate": 0.0001775887943971986, + "loss": 4.3745, + "step": 1126 + }, + { + "epoch": 0.467489370527844, + "grad_norm": 2.09375, + "learning_rate": 0.0001775687843921961, + "loss": 4.0623, + "step": 1127 + }, + { + "epoch": 0.4679041791973452, + "grad_norm": 1.8984375, + "learning_rate": 0.00017754877438719359, + "loss": 4.3665, + "step": 1128 + }, + { + "epoch": 0.4683189878668464, + "grad_norm": 2.0, + "learning_rate": 0.0001775287643821911, + "loss": 4.5509, + "step": 1129 + }, + { + "epoch": 0.4687337965363476, + "grad_norm": 2.03125, + "learning_rate": 0.00017750875437718861, + "loss": 4.2777, + "step": 1130 + }, + { + "epoch": 0.4691486052058488, + "grad_norm": 1.8828125, + "learning_rate": 0.0001774887443721861, + "loss": 4.3339, + "step": 1131 + }, + { + "epoch": 0.46956341387535, + "grad_norm": 1.984375, + "learning_rate": 0.00017746873436718362, + "loss": 4.7568, + "step": 1132 + }, + { + "epoch": 0.46997822254485117, + "grad_norm": 2.1875, + "learning_rate": 0.0001774487243621811, + "loss": 4.5942, + "step": 1133 + }, + { + "epoch": 0.4703930312143524, + "grad_norm": 2.4375, + "learning_rate": 0.00017742871435717862, + "loss": 4.4972, + "step": 1134 + }, + { + "epoch": 0.47080783988385355, + "grad_norm": 1.96875, + "learning_rate": 0.0001774087043521761, + "loss": 4.5464, + "step": 1135 + }, + { + "epoch": 0.4712226485533548, + "grad_norm": 2.375, + "learning_rate": 0.0001773886943471736, + "loss": 4.3029, + "step": 1136 + }, + { + "epoch": 0.47163745722285594, + "grad_norm": 2.09375, + "learning_rate": 0.00017736868434217108, + "loss": 4.1835, + "step": 1137 + }, + { + "epoch": 0.47205226589235716, + "grad_norm": 2.34375, + "learning_rate": 0.0001773486743371686, + "loss": 4.3698, + "step": 1138 + }, + { + "epoch": 0.4724670745618583, + "grad_norm": 2.125, + "learning_rate": 0.00017732866433216608, + "loss": 4.4014, + "step": 1139 + }, + { + "epoch": 0.47288188323135955, + "grad_norm": 1.9921875, + "learning_rate": 0.0001773086543271636, + "loss": 4.3377, + "step": 1140 + }, + { + "epoch": 0.4732966919008607, + "grad_norm": 1.9140625, + "learning_rate": 0.00017728864432216108, + "loss": 4.4091, + "step": 1141 + }, + { + "epoch": 0.47371150057036193, + "grad_norm": 1.96875, + "learning_rate": 
0.0001772686343171586, + "loss": 4.3561, + "step": 1142 + }, + { + "epoch": 0.4741263092398631, + "grad_norm": 1.9609375, + "learning_rate": 0.00017724862431215608, + "loss": 4.488, + "step": 1143 + }, + { + "epoch": 0.4745411179093643, + "grad_norm": 1.9140625, + "learning_rate": 0.00017722861430715357, + "loss": 4.5677, + "step": 1144 + }, + { + "epoch": 0.4749559265788655, + "grad_norm": 2.125, + "learning_rate": 0.00017720860430215109, + "loss": 4.376, + "step": 1145 + }, + { + "epoch": 0.4753707352483667, + "grad_norm": 2.0625, + "learning_rate": 0.00017718859429714857, + "loss": 4.4235, + "step": 1146 + }, + { + "epoch": 0.47578554391786787, + "grad_norm": 2.125, + "learning_rate": 0.0001771685842921461, + "loss": 4.4235, + "step": 1147 + }, + { + "epoch": 0.4762003525873691, + "grad_norm": 2.015625, + "learning_rate": 0.00017714857428714358, + "loss": 4.5709, + "step": 1148 + }, + { + "epoch": 0.47661516125687026, + "grad_norm": 2.0625, + "learning_rate": 0.0001771285642821411, + "loss": 4.4402, + "step": 1149 + }, + { + "epoch": 0.4770299699263715, + "grad_norm": 2.125, + "learning_rate": 0.00017710855427713858, + "loss": 4.1787, + "step": 1150 + }, + { + "epoch": 0.47744477859587264, + "grad_norm": 1.9375, + "learning_rate": 0.0001770885442721361, + "loss": 4.3755, + "step": 1151 + }, + { + "epoch": 0.47785958726537386, + "grad_norm": 2.46875, + "learning_rate": 0.00017706853426713358, + "loss": 4.7307, + "step": 1152 + }, + { + "epoch": 0.47827439593487503, + "grad_norm": 1.9296875, + "learning_rate": 0.00017704852426213107, + "loss": 4.4037, + "step": 1153 + }, + { + "epoch": 0.47868920460437625, + "grad_norm": 2.09375, + "learning_rate": 0.00017702851425712855, + "loss": 4.5645, + "step": 1154 + }, + { + "epoch": 0.4791040132738774, + "grad_norm": 1.9375, + "learning_rate": 0.00017700850425212607, + "loss": 4.4413, + "step": 1155 + }, + { + "epoch": 0.47951882194337864, + "grad_norm": 2.21875, + "learning_rate": 0.00017698849424712355, + "loss": 4.189, + "step": 1156 + }, + { + "epoch": 0.4799336306128798, + "grad_norm": 2.078125, + "learning_rate": 0.00017696848424212107, + "loss": 4.4323, + "step": 1157 + }, + { + "epoch": 0.480348439282381, + "grad_norm": 1.8515625, + "learning_rate": 0.00017694847423711858, + "loss": 4.2657, + "step": 1158 + }, + { + "epoch": 0.4807632479518822, + "grad_norm": 2.09375, + "learning_rate": 0.00017692846423211607, + "loss": 4.6062, + "step": 1159 + }, + { + "epoch": 0.4811780566213834, + "grad_norm": 2.375, + "learning_rate": 0.00017690845422711359, + "loss": 4.5159, + "step": 1160 + }, + { + "epoch": 0.4815928652908846, + "grad_norm": 1.9921875, + "learning_rate": 0.00017688844422211107, + "loss": 4.4225, + "step": 1161 + }, + { + "epoch": 0.4820076739603858, + "grad_norm": 1.796875, + "learning_rate": 0.00017686843421710856, + "loss": 4.6641, + "step": 1162 + }, + { + "epoch": 0.48242248262988696, + "grad_norm": 1.890625, + "learning_rate": 0.00017684842421210605, + "loss": 4.3169, + "step": 1163 + }, + { + "epoch": 0.4828372912993882, + "grad_norm": 2.015625, + "learning_rate": 0.00017682841420710356, + "loss": 4.2844, + "step": 1164 + }, + { + "epoch": 0.48325209996888935, + "grad_norm": 2.015625, + "learning_rate": 0.00017680840420210105, + "loss": 4.3329, + "step": 1165 + }, + { + "epoch": 0.48366690863839057, + "grad_norm": 2.0625, + "learning_rate": 0.00017678839419709856, + "loss": 4.4835, + "step": 1166 + }, + { + "epoch": 0.48408171730789173, + "grad_norm": 2.0625, + "learning_rate": 0.00017676838419209605, + "loss": 4.3825, + 
"step": 1167 + }, + { + "epoch": 0.4844965259773929, + "grad_norm": 2.015625, + "learning_rate": 0.00017674837418709356, + "loss": 4.5153, + "step": 1168 + }, + { + "epoch": 0.4849113346468941, + "grad_norm": 2.140625, + "learning_rate": 0.00017672836418209105, + "loss": 4.3609, + "step": 1169 + }, + { + "epoch": 0.4853261433163953, + "grad_norm": 2.4375, + "learning_rate": 0.00017670835417708857, + "loss": 4.3737, + "step": 1170 + }, + { + "epoch": 0.4857409519858965, + "grad_norm": 2.1875, + "learning_rate": 0.00017668834417208605, + "loss": 4.3602, + "step": 1171 + }, + { + "epoch": 0.4861557606553977, + "grad_norm": 2.234375, + "learning_rate": 0.00017666833416708354, + "loss": 4.3427, + "step": 1172 + }, + { + "epoch": 0.4865705693248989, + "grad_norm": 2.234375, + "learning_rate": 0.00017664832416208106, + "loss": 4.4172, + "step": 1173 + }, + { + "epoch": 0.48698537799440006, + "grad_norm": 2.078125, + "learning_rate": 0.00017662831415707854, + "loss": 4.3782, + "step": 1174 + }, + { + "epoch": 0.4874001866639013, + "grad_norm": 2.375, + "learning_rate": 0.00017660830415207606, + "loss": 4.8118, + "step": 1175 + }, + { + "epoch": 0.48781499533340245, + "grad_norm": 2.109375, + "learning_rate": 0.00017658829414707354, + "loss": 4.6522, + "step": 1176 + }, + { + "epoch": 0.48822980400290367, + "grad_norm": 1.7421875, + "learning_rate": 0.00017656828414207106, + "loss": 4.2258, + "step": 1177 + }, + { + "epoch": 0.48864461267240483, + "grad_norm": 1.8359375, + "learning_rate": 0.00017654827413706855, + "loss": 4.2992, + "step": 1178 + }, + { + "epoch": 0.48905942134190605, + "grad_norm": 2.078125, + "learning_rate": 0.00017652826413206603, + "loss": 4.1931, + "step": 1179 + }, + { + "epoch": 0.4894742300114072, + "grad_norm": 2.03125, + "learning_rate": 0.00017650825412706352, + "loss": 4.4329, + "step": 1180 + }, + { + "epoch": 0.48988903868090844, + "grad_norm": 2.3125, + "learning_rate": 0.00017648824412206103, + "loss": 4.3167, + "step": 1181 + }, + { + "epoch": 0.4903038473504096, + "grad_norm": 2.15625, + "learning_rate": 0.00017646823411705852, + "loss": 4.4636, + "step": 1182 + }, + { + "epoch": 0.4907186560199108, + "grad_norm": 2.359375, + "learning_rate": 0.00017644822411205604, + "loss": 4.1785, + "step": 1183 + }, + { + "epoch": 0.491133464689412, + "grad_norm": 2.5, + "learning_rate": 0.00017642821410705352, + "loss": 4.3716, + "step": 1184 + }, + { + "epoch": 0.4915482733589132, + "grad_norm": 2.046875, + "learning_rate": 0.00017640820410205104, + "loss": 4.6145, + "step": 1185 + }, + { + "epoch": 0.4919630820284144, + "grad_norm": 2.109375, + "learning_rate": 0.00017638819409704855, + "loss": 4.2539, + "step": 1186 + }, + { + "epoch": 0.4923778906979156, + "grad_norm": 1.9375, + "learning_rate": 0.00017636818409204604, + "loss": 4.4768, + "step": 1187 + }, + { + "epoch": 0.49279269936741676, + "grad_norm": 2.125, + "learning_rate": 0.00017634817408704353, + "loss": 4.3121, + "step": 1188 + }, + { + "epoch": 0.493207508036918, + "grad_norm": 2.0625, + "learning_rate": 0.00017632816408204101, + "loss": 4.2803, + "step": 1189 + }, + { + "epoch": 0.49362231670641915, + "grad_norm": 2.21875, + "learning_rate": 0.00017630815407703853, + "loss": 4.81, + "step": 1190 + }, + { + "epoch": 0.49403712537592037, + "grad_norm": 1.8203125, + "learning_rate": 0.00017628814407203602, + "loss": 4.5307, + "step": 1191 + }, + { + "epoch": 0.49445193404542154, + "grad_norm": 1.90625, + "learning_rate": 0.00017626813406703353, + "loss": 4.4134, + "step": 1192 + }, + { + "epoch": 
0.49486674271492276, + "grad_norm": 2.015625, + "learning_rate": 0.00017624812406203102, + "loss": 4.3406, + "step": 1193 + }, + { + "epoch": 0.4952815513844239, + "grad_norm": 1.9453125, + "learning_rate": 0.00017622811405702853, + "loss": 4.2751, + "step": 1194 + }, + { + "epoch": 0.49569636005392514, + "grad_norm": 2.125, + "learning_rate": 0.00017620810405202602, + "loss": 4.3858, + "step": 1195 + }, + { + "epoch": 0.4961111687234263, + "grad_norm": 1.96875, + "learning_rate": 0.00017618809404702353, + "loss": 4.4253, + "step": 1196 + }, + { + "epoch": 0.49652597739292753, + "grad_norm": 1.8515625, + "learning_rate": 0.00017616808404202102, + "loss": 4.4802, + "step": 1197 + }, + { + "epoch": 0.4969407860624287, + "grad_norm": 1.875, + "learning_rate": 0.0001761480740370185, + "loss": 4.2476, + "step": 1198 + }, + { + "epoch": 0.4973555947319299, + "grad_norm": 1.8984375, + "learning_rate": 0.00017612806403201602, + "loss": 4.5804, + "step": 1199 + }, + { + "epoch": 0.4977704034014311, + "grad_norm": 2.28125, + "learning_rate": 0.0001761080540270135, + "loss": 4.3889, + "step": 1200 + }, + { + "epoch": 0.4981852120709323, + "grad_norm": 1.7890625, + "learning_rate": 0.00017608804402201102, + "loss": 4.2759, + "step": 1201 + }, + { + "epoch": 0.49860002074043347, + "grad_norm": 1.8046875, + "learning_rate": 0.0001760680340170085, + "loss": 4.4606, + "step": 1202 + }, + { + "epoch": 0.4990148294099347, + "grad_norm": 1.984375, + "learning_rate": 0.00017604802401200603, + "loss": 4.3045, + "step": 1203 + }, + { + "epoch": 0.49942963807943586, + "grad_norm": 1.921875, + "learning_rate": 0.0001760280140070035, + "loss": 4.4005, + "step": 1204 + }, + { + "epoch": 0.4998444467489371, + "grad_norm": 1.890625, + "learning_rate": 0.00017600800400200103, + "loss": 4.7614, + "step": 1205 + }, + { + "epoch": 0.5002592554184383, + "grad_norm": 2.234375, + "learning_rate": 0.00017598799399699851, + "loss": 4.4581, + "step": 1206 + }, + { + "epoch": 0.5006740640879395, + "grad_norm": 1.96875, + "learning_rate": 0.000175967983991996, + "loss": 4.2721, + "step": 1207 + }, + { + "epoch": 0.5010888727574406, + "grad_norm": 1.9765625, + "learning_rate": 0.0001759479739869935, + "loss": 4.5858, + "step": 1208 + }, + { + "epoch": 0.5015036814269418, + "grad_norm": 1.875, + "learning_rate": 0.000175927963981991, + "loss": 4.2532, + "step": 1209 + }, + { + "epoch": 0.5019184900964431, + "grad_norm": 1.984375, + "learning_rate": 0.0001759079539769885, + "loss": 4.5726, + "step": 1210 + }, + { + "epoch": 0.5023332987659442, + "grad_norm": 2.28125, + "learning_rate": 0.000175887943971986, + "loss": 4.3911, + "step": 1211 + }, + { + "epoch": 0.5027481074354454, + "grad_norm": 1.96875, + "learning_rate": 0.0001758679339669835, + "loss": 4.3685, + "step": 1212 + }, + { + "epoch": 0.5031629161049466, + "grad_norm": 1.9140625, + "learning_rate": 0.000175847923961981, + "loss": 4.2716, + "step": 1213 + }, + { + "epoch": 0.5035777247744477, + "grad_norm": 2.09375, + "learning_rate": 0.0001758279139569785, + "loss": 4.3746, + "step": 1214 + }, + { + "epoch": 0.503992533443949, + "grad_norm": 1.8828125, + "learning_rate": 0.00017580790395197598, + "loss": 4.1584, + "step": 1215 + }, + { + "epoch": 0.5044073421134502, + "grad_norm": 2.09375, + "learning_rate": 0.0001757878939469735, + "loss": 4.4824, + "step": 1216 + }, + { + "epoch": 0.5048221507829513, + "grad_norm": 1.9609375, + "learning_rate": 0.00017576788394197098, + "loss": 4.2045, + "step": 1217 + }, + { + "epoch": 0.5052369594524525, + "grad_norm": 2.0625, + 
"learning_rate": 0.0001757478739369685, + "loss": 4.4475, + "step": 1218 + }, + { + "epoch": 0.5056517681219538, + "grad_norm": 1.9453125, + "learning_rate": 0.00017572786393196599, + "loss": 4.0906, + "step": 1219 + }, + { + "epoch": 0.506066576791455, + "grad_norm": 2.0, + "learning_rate": 0.0001757078539269635, + "loss": 4.3062, + "step": 1220 + }, + { + "epoch": 0.5064813854609561, + "grad_norm": 2.03125, + "learning_rate": 0.000175687843921961, + "loss": 4.2466, + "step": 1221 + }, + { + "epoch": 0.5068961941304573, + "grad_norm": 1.8515625, + "learning_rate": 0.0001756678339169585, + "loss": 4.395, + "step": 1222 + }, + { + "epoch": 0.5073110027999586, + "grad_norm": 1.859375, + "learning_rate": 0.000175647823911956, + "loss": 4.2161, + "step": 1223 + }, + { + "epoch": 0.5077258114694597, + "grad_norm": 2.21875, + "learning_rate": 0.00017562781390695348, + "loss": 4.4077, + "step": 1224 + }, + { + "epoch": 0.5081406201389609, + "grad_norm": 1.8046875, + "learning_rate": 0.00017560780390195096, + "loss": 4.2748, + "step": 1225 + }, + { + "epoch": 0.508555428808462, + "grad_norm": 2.015625, + "learning_rate": 0.00017558779389694848, + "loss": 4.2839, + "step": 1226 + }, + { + "epoch": 0.5089702374779633, + "grad_norm": 1.7578125, + "learning_rate": 0.000175567783891946, + "loss": 4.2471, + "step": 1227 + }, + { + "epoch": 0.5093850461474645, + "grad_norm": 1.9921875, + "learning_rate": 0.00017554777388694348, + "loss": 4.2755, + "step": 1228 + }, + { + "epoch": 0.5097998548169657, + "grad_norm": 1.7890625, + "learning_rate": 0.000175527763881941, + "loss": 4.3981, + "step": 1229 + }, + { + "epoch": 0.5102146634864668, + "grad_norm": 2.140625, + "learning_rate": 0.00017550775387693848, + "loss": 4.489, + "step": 1230 + }, + { + "epoch": 0.5106294721559681, + "grad_norm": 1.9375, + "learning_rate": 0.000175487743871936, + "loss": 4.2009, + "step": 1231 + }, + { + "epoch": 0.5110442808254693, + "grad_norm": 2.09375, + "learning_rate": 0.00017546773386693348, + "loss": 4.5686, + "step": 1232 + }, + { + "epoch": 0.5114590894949704, + "grad_norm": 2.734375, + "learning_rate": 0.00017544772386193097, + "loss": 4.3067, + "step": 1233 + }, + { + "epoch": 0.5118738981644716, + "grad_norm": 2.140625, + "learning_rate": 0.00017542771385692846, + "loss": 4.3154, + "step": 1234 + }, + { + "epoch": 0.5122887068339729, + "grad_norm": 2.09375, + "learning_rate": 0.00017540770385192597, + "loss": 4.75, + "step": 1235 + }, + { + "epoch": 0.512703515503474, + "grad_norm": 2.015625, + "learning_rate": 0.00017538769384692346, + "loss": 4.2725, + "step": 1236 + }, + { + "epoch": 0.5131183241729752, + "grad_norm": 2.125, + "learning_rate": 0.00017536768384192097, + "loss": 4.3889, + "step": 1237 + }, + { + "epoch": 0.5135331328424764, + "grad_norm": 2.078125, + "learning_rate": 0.00017534767383691846, + "loss": 4.2123, + "step": 1238 + }, + { + "epoch": 0.5139479415119776, + "grad_norm": 2.0625, + "learning_rate": 0.00017532766383191597, + "loss": 4.2747, + "step": 1239 + }, + { + "epoch": 0.5143627501814788, + "grad_norm": 1.9765625, + "learning_rate": 0.0001753076538269135, + "loss": 4.4114, + "step": 1240 + }, + { + "epoch": 0.51477755885098, + "grad_norm": 1.96875, + "learning_rate": 0.00017528764382191098, + "loss": 4.3279, + "step": 1241 + }, + { + "epoch": 0.5151923675204811, + "grad_norm": 2.140625, + "learning_rate": 0.00017526763381690846, + "loss": 4.5109, + "step": 1242 + }, + { + "epoch": 0.5156071761899824, + "grad_norm": 1.8984375, + "learning_rate": 0.00017524762381190595, + "loss": 4.2621, + 
"step": 1243 + }, + { + "epoch": 0.5160219848594836, + "grad_norm": 2.21875, + "learning_rate": 0.00017522761380690347, + "loss": 4.3983, + "step": 1244 + }, + { + "epoch": 0.5164367935289847, + "grad_norm": 1.890625, + "learning_rate": 0.00017520760380190095, + "loss": 4.6373, + "step": 1245 + }, + { + "epoch": 0.5168516021984859, + "grad_norm": 2.0, + "learning_rate": 0.00017518759379689847, + "loss": 4.4741, + "step": 1246 + }, + { + "epoch": 0.5172664108679872, + "grad_norm": 2.03125, + "learning_rate": 0.00017516758379189595, + "loss": 4.3331, + "step": 1247 + }, + { + "epoch": 0.5176812195374884, + "grad_norm": 2.125, + "learning_rate": 0.00017514757378689347, + "loss": 4.4647, + "step": 1248 + }, + { + "epoch": 0.5180960282069895, + "grad_norm": 1.7578125, + "learning_rate": 0.00017512756378189096, + "loss": 4.4374, + "step": 1249 + }, + { + "epoch": 0.5185108368764907, + "grad_norm": 1.8046875, + "learning_rate": 0.00017510755377688844, + "loss": 4.2743, + "step": 1250 + }, + { + "epoch": 0.518925645545992, + "grad_norm": 2.078125, + "learning_rate": 0.00017508754377188593, + "loss": 4.0994, + "step": 1251 + }, + { + "epoch": 0.5193404542154931, + "grad_norm": 2.03125, + "learning_rate": 0.00017506753376688344, + "loss": 4.6526, + "step": 1252 + }, + { + "epoch": 0.5197552628849943, + "grad_norm": 1.8671875, + "learning_rate": 0.00017504752376188093, + "loss": 4.3946, + "step": 1253 + }, + { + "epoch": 0.5201700715544955, + "grad_norm": 1.8828125, + "learning_rate": 0.00017502751375687845, + "loss": 4.1943, + "step": 1254 + }, + { + "epoch": 0.5205848802239966, + "grad_norm": 2.09375, + "learning_rate": 0.00017500750375187596, + "loss": 4.2717, + "step": 1255 + }, + { + "epoch": 0.5209996888934979, + "grad_norm": 1.96875, + "learning_rate": 0.00017498749374687345, + "loss": 4.5654, + "step": 1256 + }, + { + "epoch": 0.5214144975629991, + "grad_norm": 2.34375, + "learning_rate": 0.00017496748374187096, + "loss": 4.5405, + "step": 1257 + }, + { + "epoch": 0.5218293062325002, + "grad_norm": 2.171875, + "learning_rate": 0.00017494747373686845, + "loss": 4.2844, + "step": 1258 + }, + { + "epoch": 0.5222441149020014, + "grad_norm": 2.171875, + "learning_rate": 0.00017492746373186594, + "loss": 4.4466, + "step": 1259 + }, + { + "epoch": 0.5226589235715027, + "grad_norm": 1.9375, + "learning_rate": 0.00017490745372686342, + "loss": 4.3552, + "step": 1260 + }, + { + "epoch": 0.5230737322410038, + "grad_norm": 2.046875, + "learning_rate": 0.00017488744372186094, + "loss": 4.3936, + "step": 1261 + }, + { + "epoch": 0.523488540910505, + "grad_norm": 2.15625, + "learning_rate": 0.00017486743371685843, + "loss": 4.3574, + "step": 1262 + }, + { + "epoch": 0.5239033495800062, + "grad_norm": 2.0625, + "learning_rate": 0.00017484742371185594, + "loss": 4.4254, + "step": 1263 + }, + { + "epoch": 0.5243181582495074, + "grad_norm": 1.8359375, + "learning_rate": 0.00017482741370685343, + "loss": 4.2937, + "step": 1264 + }, + { + "epoch": 0.5247329669190086, + "grad_norm": 1.984375, + "learning_rate": 0.00017480740370185094, + "loss": 4.2344, + "step": 1265 + }, + { + "epoch": 0.5251477755885098, + "grad_norm": 2.234375, + "learning_rate": 0.00017478739369684843, + "loss": 4.1864, + "step": 1266 + }, + { + "epoch": 0.525562584258011, + "grad_norm": 1.984375, + "learning_rate": 0.00017476738369184594, + "loss": 4.1684, + "step": 1267 + }, + { + "epoch": 0.5259773929275122, + "grad_norm": 1.765625, + "learning_rate": 0.00017474737368684343, + "loss": 4.4383, + "step": 1268 + }, + { + "epoch": 
0.5263922015970134, + "grad_norm": 2.0, + "learning_rate": 0.00017472736368184092, + "loss": 4.1107, + "step": 1269 + }, + { + "epoch": 0.5268070102665146, + "grad_norm": 2.328125, + "learning_rate": 0.00017470735367683843, + "loss": 4.4619, + "step": 1270 + }, + { + "epoch": 0.5272218189360157, + "grad_norm": 1.8203125, + "learning_rate": 0.00017468734367183592, + "loss": 4.1893, + "step": 1271 + }, + { + "epoch": 0.527636627605517, + "grad_norm": 1.71875, + "learning_rate": 0.00017466733366683343, + "loss": 4.5942, + "step": 1272 + }, + { + "epoch": 0.5280514362750182, + "grad_norm": 2.046875, + "learning_rate": 0.00017464732366183092, + "loss": 4.305, + "step": 1273 + }, + { + "epoch": 0.5284662449445193, + "grad_norm": 1.8359375, + "learning_rate": 0.00017462731365682844, + "loss": 4.1635, + "step": 1274 + }, + { + "epoch": 0.5288810536140205, + "grad_norm": 1.9453125, + "learning_rate": 0.00017460730365182592, + "loss": 4.4209, + "step": 1275 + }, + { + "epoch": 0.5292958622835218, + "grad_norm": 1.9609375, + "learning_rate": 0.00017458729364682344, + "loss": 4.2263, + "step": 1276 + }, + { + "epoch": 0.5297106709530229, + "grad_norm": 1.9609375, + "learning_rate": 0.0001745672836418209, + "loss": 4.355, + "step": 1277 + }, + { + "epoch": 0.5301254796225241, + "grad_norm": 2.046875, + "learning_rate": 0.0001745472736368184, + "loss": 4.2577, + "step": 1278 + }, + { + "epoch": 0.5305402882920253, + "grad_norm": 1.9375, + "learning_rate": 0.0001745272636318159, + "loss": 4.3233, + "step": 1279 + }, + { + "epoch": 0.5309550969615265, + "grad_norm": 1.8359375, + "learning_rate": 0.00017450725362681341, + "loss": 4.3949, + "step": 1280 + }, + { + "epoch": 0.5313699056310277, + "grad_norm": 1.9375, + "learning_rate": 0.0001744872436218109, + "loss": 4.0058, + "step": 1281 + }, + { + "epoch": 0.5317847143005289, + "grad_norm": 2.03125, + "learning_rate": 0.00017446723361680842, + "loss": 4.4116, + "step": 1282 + }, + { + "epoch": 0.53219952297003, + "grad_norm": 1.84375, + "learning_rate": 0.00017444722361180593, + "loss": 4.1986, + "step": 1283 + }, + { + "epoch": 0.5326143316395313, + "grad_norm": 2.0625, + "learning_rate": 0.00017442721360680342, + "loss": 4.248, + "step": 1284 + }, + { + "epoch": 0.5330291403090325, + "grad_norm": 2.171875, + "learning_rate": 0.0001744072036018009, + "loss": 4.5027, + "step": 1285 + }, + { + "epoch": 0.5334439489785336, + "grad_norm": 2.046875, + "learning_rate": 0.0001743871935967984, + "loss": 4.3186, + "step": 1286 + }, + { + "epoch": 0.5338587576480348, + "grad_norm": 1.9609375, + "learning_rate": 0.0001743671835917959, + "loss": 4.3903, + "step": 1287 + }, + { + "epoch": 0.5342735663175361, + "grad_norm": 2.359375, + "learning_rate": 0.0001743471735867934, + "loss": 4.583, + "step": 1288 + }, + { + "epoch": 0.5346883749870373, + "grad_norm": 1.90625, + "learning_rate": 0.0001743271635817909, + "loss": 4.2498, + "step": 1289 + }, + { + "epoch": 0.5351031836565384, + "grad_norm": 1.9453125, + "learning_rate": 0.0001743071535767884, + "loss": 4.5248, + "step": 1290 + }, + { + "epoch": 0.5355179923260396, + "grad_norm": 2.03125, + "learning_rate": 0.0001742871435717859, + "loss": 4.3364, + "step": 1291 + }, + { + "epoch": 0.5359328009955409, + "grad_norm": 2.125, + "learning_rate": 0.0001742671335667834, + "loss": 4.1994, + "step": 1292 + }, + { + "epoch": 0.536347609665042, + "grad_norm": 1.984375, + "learning_rate": 0.0001742471235617809, + "loss": 4.375, + "step": 1293 + }, + { + "epoch": 0.5367624183345432, + "grad_norm": 1.9375, + "learning_rate": 
0.0001742271135567784, + "loss": 4.301, + "step": 1294 + }, + { + "epoch": 0.5371772270040444, + "grad_norm": 2.265625, + "learning_rate": 0.00017420710355177589, + "loss": 4.4581, + "step": 1295 + }, + { + "epoch": 0.5375920356735456, + "grad_norm": 1.78125, + "learning_rate": 0.0001741870935467734, + "loss": 4.2014, + "step": 1296 + }, + { + "epoch": 0.5380068443430468, + "grad_norm": 1.875, + "learning_rate": 0.0001741670835417709, + "loss": 4.4829, + "step": 1297 + }, + { + "epoch": 0.538421653012548, + "grad_norm": 2.015625, + "learning_rate": 0.0001741470735367684, + "loss": 4.0274, + "step": 1298 + }, + { + "epoch": 0.5388364616820491, + "grad_norm": 1.7578125, + "learning_rate": 0.0001741270635317659, + "loss": 4.3097, + "step": 1299 + }, + { + "epoch": 0.5392512703515503, + "grad_norm": 1.921875, + "learning_rate": 0.0001741070535267634, + "loss": 4.3272, + "step": 1300 + }, + { + "epoch": 0.5396660790210516, + "grad_norm": 2.359375, + "learning_rate": 0.0001740870435217609, + "loss": 4.6465, + "step": 1301 + }, + { + "epoch": 0.5400808876905527, + "grad_norm": 1.9453125, + "learning_rate": 0.0001740670335167584, + "loss": 3.9895, + "step": 1302 + }, + { + "epoch": 0.5404956963600539, + "grad_norm": 1.875, + "learning_rate": 0.0001740470235117559, + "loss": 4.4907, + "step": 1303 + }, + { + "epoch": 0.5409105050295551, + "grad_norm": 1.8203125, + "learning_rate": 0.00017402701350675338, + "loss": 4.2979, + "step": 1304 + }, + { + "epoch": 0.5413253136990563, + "grad_norm": 1.9375, + "learning_rate": 0.00017400700350175087, + "loss": 4.3366, + "step": 1305 + }, + { + "epoch": 0.5417401223685575, + "grad_norm": 2.0625, + "learning_rate": 0.00017398699349674838, + "loss": 4.311, + "step": 1306 + }, + { + "epoch": 0.5421549310380587, + "grad_norm": 2.203125, + "learning_rate": 0.00017396698349174587, + "loss": 4.4545, + "step": 1307 + }, + { + "epoch": 0.5425697397075598, + "grad_norm": 1.953125, + "learning_rate": 0.00017394697348674338, + "loss": 4.3074, + "step": 1308 + }, + { + "epoch": 0.5429845483770611, + "grad_norm": 2.03125, + "learning_rate": 0.00017392696348174087, + "loss": 4.2206, + "step": 1309 + }, + { + "epoch": 0.5433993570465623, + "grad_norm": 1.8984375, + "learning_rate": 0.00017390695347673838, + "loss": 4.2822, + "step": 1310 + }, + { + "epoch": 0.5438141657160634, + "grad_norm": 2.1875, + "learning_rate": 0.0001738869434717359, + "loss": 4.2488, + "step": 1311 + }, + { + "epoch": 0.5442289743855646, + "grad_norm": 2.203125, + "learning_rate": 0.00017386693346673336, + "loss": 4.2637, + "step": 1312 + }, + { + "epoch": 0.5446437830550659, + "grad_norm": 1.8359375, + "learning_rate": 0.00017384692346173087, + "loss": 4.1538, + "step": 1313 + }, + { + "epoch": 0.545058591724567, + "grad_norm": 1.96875, + "learning_rate": 0.00017382691345672836, + "loss": 4.28, + "step": 1314 + }, + { + "epoch": 0.5454734003940682, + "grad_norm": 2.03125, + "learning_rate": 0.00017380690345172588, + "loss": 4.0767, + "step": 1315 + }, + { + "epoch": 0.5458882090635694, + "grad_norm": 2.140625, + "learning_rate": 0.00017378689344672336, + "loss": 4.4075, + "step": 1316 + }, + { + "epoch": 0.5463030177330707, + "grad_norm": 2.1875, + "learning_rate": 0.00017376688344172088, + "loss": 4.3724, + "step": 1317 + }, + { + "epoch": 0.5467178264025718, + "grad_norm": 2.140625, + "learning_rate": 0.00017374687343671836, + "loss": 4.4544, + "step": 1318 + }, + { + "epoch": 0.547132635072073, + "grad_norm": 1.953125, + "learning_rate": 0.00017372686343171588, + "loss": 4.268, + "step": 1319 + 
}, + { + "epoch": 0.5475474437415742, + "grad_norm": 1.984375, + "learning_rate": 0.00017370685342671337, + "loss": 4.1814, + "step": 1320 + }, + { + "epoch": 0.5479622524110754, + "grad_norm": 1.875, + "learning_rate": 0.00017368684342171085, + "loss": 4.368, + "step": 1321 + }, + { + "epoch": 0.5483770610805766, + "grad_norm": 2.03125, + "learning_rate": 0.00017366683341670834, + "loss": 4.2994, + "step": 1322 + }, + { + "epoch": 0.5487918697500778, + "grad_norm": 1.9296875, + "learning_rate": 0.00017364682341170585, + "loss": 4.2659, + "step": 1323 + }, + { + "epoch": 0.5492066784195789, + "grad_norm": 2.328125, + "learning_rate": 0.00017362681340670337, + "loss": 4.425, + "step": 1324 + }, + { + "epoch": 0.5496214870890802, + "grad_norm": 2.171875, + "learning_rate": 0.00017360680340170086, + "loss": 4.5258, + "step": 1325 + }, + { + "epoch": 0.5500362957585814, + "grad_norm": 2.1875, + "learning_rate": 0.00017358679339669837, + "loss": 4.526, + "step": 1326 + }, + { + "epoch": 0.5504511044280825, + "grad_norm": 1.8359375, + "learning_rate": 0.00017356678339169586, + "loss": 4.4056, + "step": 1327 + }, + { + "epoch": 0.5508659130975837, + "grad_norm": 1.9296875, + "learning_rate": 0.00017354677338669337, + "loss": 4.234, + "step": 1328 + }, + { + "epoch": 0.551280721767085, + "grad_norm": 1.8359375, + "learning_rate": 0.00017352676338169086, + "loss": 4.3617, + "step": 1329 + }, + { + "epoch": 0.5516955304365861, + "grad_norm": 2.015625, + "learning_rate": 0.00017350675337668835, + "loss": 4.4761, + "step": 1330 + }, + { + "epoch": 0.5521103391060873, + "grad_norm": 1.96875, + "learning_rate": 0.00017348674337168583, + "loss": 4.5065, + "step": 1331 + }, + { + "epoch": 0.5525251477755885, + "grad_norm": 1.9296875, + "learning_rate": 0.00017346673336668335, + "loss": 4.5396, + "step": 1332 + }, + { + "epoch": 0.5529399564450898, + "grad_norm": 1.890625, + "learning_rate": 0.00017344672336168084, + "loss": 4.8258, + "step": 1333 + }, + { + "epoch": 0.5533547651145909, + "grad_norm": 1.9453125, + "learning_rate": 0.00017342671335667835, + "loss": 4.424, + "step": 1334 + }, + { + "epoch": 0.5537695737840921, + "grad_norm": 2.046875, + "learning_rate": 0.00017340670335167584, + "loss": 4.4027, + "step": 1335 + }, + { + "epoch": 0.5541843824535932, + "grad_norm": 1.9375, + "learning_rate": 0.00017338669334667335, + "loss": 4.4958, + "step": 1336 + }, + { + "epoch": 0.5545991911230945, + "grad_norm": 1.9140625, + "learning_rate": 0.00017336668334167087, + "loss": 4.3067, + "step": 1337 + }, + { + "epoch": 0.5550139997925957, + "grad_norm": 1.9296875, + "learning_rate": 0.00017334667333666835, + "loss": 4.2119, + "step": 1338 + }, + { + "epoch": 0.5554288084620969, + "grad_norm": 1.8046875, + "learning_rate": 0.00017332666333166584, + "loss": 4.4246, + "step": 1339 + }, + { + "epoch": 0.555843617131598, + "grad_norm": 1.890625, + "learning_rate": 0.00017330665332666333, + "loss": 4.3445, + "step": 1340 + }, + { + "epoch": 0.5562584258010992, + "grad_norm": 2.171875, + "learning_rate": 0.00017328664332166084, + "loss": 4.487, + "step": 1341 + }, + { + "epoch": 0.5566732344706005, + "grad_norm": 1.796875, + "learning_rate": 0.00017326663331665833, + "loss": 4.316, + "step": 1342 + }, + { + "epoch": 0.5570880431401016, + "grad_norm": 1.9765625, + "learning_rate": 0.00017324662331165584, + "loss": 4.3887, + "step": 1343 + }, + { + "epoch": 0.5575028518096028, + "grad_norm": 1.8125, + "learning_rate": 0.00017322661330665333, + "loss": 4.2804, + "step": 1344 + }, + { + "epoch": 0.557917660479104, + 
"grad_norm": 2.0, + "learning_rate": 0.00017320660330165085, + "loss": 4.6823, + "step": 1345 + }, + { + "epoch": 0.5583324691486052, + "grad_norm": 2.0, + "learning_rate": 0.00017318659329664833, + "loss": 4.3116, + "step": 1346 + }, + { + "epoch": 0.5587472778181064, + "grad_norm": 1.8828125, + "learning_rate": 0.00017316658329164585, + "loss": 4.3569, + "step": 1347 + }, + { + "epoch": 0.5591620864876076, + "grad_norm": 2.0625, + "learning_rate": 0.0001731465732866433, + "loss": 4.3595, + "step": 1348 + }, + { + "epoch": 0.5595768951571087, + "grad_norm": 2.015625, + "learning_rate": 0.00017312656328164082, + "loss": 4.0869, + "step": 1349 + }, + { + "epoch": 0.55999170382661, + "grad_norm": 1.8828125, + "learning_rate": 0.0001731065532766383, + "loss": 4.3401, + "step": 1350 + }, + { + "epoch": 0.5604065124961112, + "grad_norm": 1.90625, + "learning_rate": 0.00017308654327163582, + "loss": 4.2857, + "step": 1351 + }, + { + "epoch": 0.5608213211656123, + "grad_norm": 2.09375, + "learning_rate": 0.00017306653326663334, + "loss": 4.1177, + "step": 1352 + }, + { + "epoch": 0.5612361298351135, + "grad_norm": 1.859375, + "learning_rate": 0.00017304652326163083, + "loss": 4.2515, + "step": 1353 + }, + { + "epoch": 0.5616509385046148, + "grad_norm": 2.046875, + "learning_rate": 0.00017302651325662834, + "loss": 4.122, + "step": 1354 + }, + { + "epoch": 0.562065747174116, + "grad_norm": 2.078125, + "learning_rate": 0.00017300650325162583, + "loss": 4.3245, + "step": 1355 + }, + { + "epoch": 0.5624805558436171, + "grad_norm": 1.8046875, + "learning_rate": 0.00017298649324662331, + "loss": 4.1449, + "step": 1356 + }, + { + "epoch": 0.5628953645131183, + "grad_norm": 2.109375, + "learning_rate": 0.0001729664832416208, + "loss": 4.3922, + "step": 1357 + }, + { + "epoch": 0.5633101731826196, + "grad_norm": 2.0, + "learning_rate": 0.00017294647323661832, + "loss": 4.2348, + "step": 1358 + }, + { + "epoch": 0.5637249818521207, + "grad_norm": 2.015625, + "learning_rate": 0.0001729264632316158, + "loss": 4.4701, + "step": 1359 + }, + { + "epoch": 0.5641397905216219, + "grad_norm": 1.8359375, + "learning_rate": 0.00017290645322661332, + "loss": 4.1999, + "step": 1360 + }, + { + "epoch": 0.564554599191123, + "grad_norm": 2.0625, + "learning_rate": 0.0001728864432216108, + "loss": 4.3161, + "step": 1361 + }, + { + "epoch": 0.5649694078606243, + "grad_norm": 2.0625, + "learning_rate": 0.00017286643321660832, + "loss": 4.1485, + "step": 1362 + }, + { + "epoch": 0.5653842165301255, + "grad_norm": 1.96875, + "learning_rate": 0.0001728464232116058, + "loss": 4.4301, + "step": 1363 + }, + { + "epoch": 0.5657990251996267, + "grad_norm": 1.9609375, + "learning_rate": 0.00017282641320660332, + "loss": 4.3702, + "step": 1364 + }, + { + "epoch": 0.5662138338691278, + "grad_norm": 1.8828125, + "learning_rate": 0.0001728064032016008, + "loss": 4.2414, + "step": 1365 + }, + { + "epoch": 0.5666286425386291, + "grad_norm": 1.9453125, + "learning_rate": 0.0001727863931965983, + "loss": 4.4038, + "step": 1366 + }, + { + "epoch": 0.5670434512081303, + "grad_norm": 1.8359375, + "learning_rate": 0.0001727663831915958, + "loss": 4.3009, + "step": 1367 + }, + { + "epoch": 0.5674582598776314, + "grad_norm": 1.921875, + "learning_rate": 0.0001727463731865933, + "loss": 4.3038, + "step": 1368 + }, + { + "epoch": 0.5678730685471326, + "grad_norm": 2.3125, + "learning_rate": 0.0001727263631815908, + "loss": 4.3313, + "step": 1369 + }, + { + "epoch": 0.5682878772166339, + "grad_norm": 1.9765625, + "learning_rate": 
0.0001727063531765883, + "loss": 4.1563, + "step": 1370 + }, + { + "epoch": 0.568702685886135, + "grad_norm": 1.9765625, + "learning_rate": 0.0001726863431715858, + "loss": 4.2363, + "step": 1371 + }, + { + "epoch": 0.5691174945556362, + "grad_norm": 1.890625, + "learning_rate": 0.0001726663331665833, + "loss": 4.3705, + "step": 1372 + }, + { + "epoch": 0.5695323032251374, + "grad_norm": 2.0, + "learning_rate": 0.00017264632316158082, + "loss": 4.3854, + "step": 1373 + }, + { + "epoch": 0.5699471118946386, + "grad_norm": 2.09375, + "learning_rate": 0.0001726263131565783, + "loss": 4.2774, + "step": 1374 + }, + { + "epoch": 0.5703619205641398, + "grad_norm": 1.9609375, + "learning_rate": 0.0001726063031515758, + "loss": 4.2757, + "step": 1375 + }, + { + "epoch": 0.570776729233641, + "grad_norm": 1.890625, + "learning_rate": 0.00017258629314657328, + "loss": 4.673, + "step": 1376 + }, + { + "epoch": 0.5711915379031421, + "grad_norm": 2.03125, + "learning_rate": 0.0001725662831415708, + "loss": 4.1743, + "step": 1377 + }, + { + "epoch": 0.5716063465726434, + "grad_norm": 1.96875, + "learning_rate": 0.00017254627313656828, + "loss": 4.1254, + "step": 1378 + }, + { + "epoch": 0.5720211552421446, + "grad_norm": 2.078125, + "learning_rate": 0.0001725262631315658, + "loss": 4.196, + "step": 1379 + }, + { + "epoch": 0.5724359639116458, + "grad_norm": 1.84375, + "learning_rate": 0.0001725062531265633, + "loss": 4.2856, + "step": 1380 + }, + { + "epoch": 0.5728507725811469, + "grad_norm": 2.515625, + "learning_rate": 0.0001724862431215608, + "loss": 4.2915, + "step": 1381 + }, + { + "epoch": 0.5732655812506482, + "grad_norm": 1.8515625, + "learning_rate": 0.0001724662331165583, + "loss": 4.4261, + "step": 1382 + }, + { + "epoch": 0.5736803899201494, + "grad_norm": 2.21875, + "learning_rate": 0.00017244622311155577, + "loss": 4.2462, + "step": 1383 + }, + { + "epoch": 0.5740951985896505, + "grad_norm": 1.9453125, + "learning_rate": 0.00017242621310655328, + "loss": 4.4918, + "step": 1384 + }, + { + "epoch": 0.5745100072591517, + "grad_norm": 2.203125, + "learning_rate": 0.00017240620310155077, + "loss": 4.1641, + "step": 1385 + }, + { + "epoch": 0.5749248159286529, + "grad_norm": 1.8125, + "learning_rate": 0.00017238619309654829, + "loss": 4.4753, + "step": 1386 + }, + { + "epoch": 0.5753396245981541, + "grad_norm": 1.9140625, + "learning_rate": 0.00017236618309154577, + "loss": 4.144, + "step": 1387 + }, + { + "epoch": 0.5757544332676553, + "grad_norm": 1.890625, + "learning_rate": 0.0001723461730865433, + "loss": 4.3885, + "step": 1388 + }, + { + "epoch": 0.5761692419371565, + "grad_norm": 2.03125, + "learning_rate": 0.00017232616308154077, + "loss": 4.1634, + "step": 1389 + }, + { + "epoch": 0.5765840506066576, + "grad_norm": 2.0, + "learning_rate": 0.0001723061530765383, + "loss": 4.3891, + "step": 1390 + }, + { + "epoch": 0.5769988592761589, + "grad_norm": 1.9140625, + "learning_rate": 0.00017228614307153578, + "loss": 4.2899, + "step": 1391 + }, + { + "epoch": 0.5774136679456601, + "grad_norm": 2.015625, + "learning_rate": 0.00017226613306653326, + "loss": 4.2481, + "step": 1392 + }, + { + "epoch": 0.5778284766151612, + "grad_norm": 1.8828125, + "learning_rate": 0.00017224612306153078, + "loss": 4.3697, + "step": 1393 + }, + { + "epoch": 0.5782432852846624, + "grad_norm": 1.875, + "learning_rate": 0.00017222611305652826, + "loss": 4.5009, + "step": 1394 + }, + { + "epoch": 0.5786580939541637, + "grad_norm": 1.984375, + "learning_rate": 0.00017220610305152578, + "loss": 4.2218, + "step": 1395 + 
}, + { + "epoch": 0.5790729026236648, + "grad_norm": 2.109375, + "learning_rate": 0.00017218609304652327, + "loss": 4.5284, + "step": 1396 + }, + { + "epoch": 0.579487711293166, + "grad_norm": 1.8515625, + "learning_rate": 0.00017216608304152078, + "loss": 4.4089, + "step": 1397 + }, + { + "epoch": 0.5799025199626672, + "grad_norm": 1.71875, + "learning_rate": 0.00017214607303651827, + "loss": 4.3238, + "step": 1398 + }, + { + "epoch": 0.5803173286321684, + "grad_norm": 1.96875, + "learning_rate": 0.00017212606303151578, + "loss": 4.1448, + "step": 1399 + }, + { + "epoch": 0.5807321373016696, + "grad_norm": 1.9609375, + "learning_rate": 0.00017210605302651327, + "loss": 4.1475, + "step": 1400 + }, + { + "epoch": 0.5811469459711708, + "grad_norm": 1.765625, + "learning_rate": 0.00017208604302151076, + "loss": 4.3953, + "step": 1401 + }, + { + "epoch": 0.581561754640672, + "grad_norm": 2.015625, + "learning_rate": 0.00017206603301650824, + "loss": 4.4854, + "step": 1402 + }, + { + "epoch": 0.5819765633101732, + "grad_norm": 2.046875, + "learning_rate": 0.00017204602301150576, + "loss": 4.461, + "step": 1403 + }, + { + "epoch": 0.5823913719796744, + "grad_norm": 1.875, + "learning_rate": 0.00017202601300650325, + "loss": 4.3844, + "step": 1404 + }, + { + "epoch": 0.5828061806491756, + "grad_norm": 1.8671875, + "learning_rate": 0.00017200600300150076, + "loss": 4.6786, + "step": 1405 + }, + { + "epoch": 0.5832209893186767, + "grad_norm": 1.8828125, + "learning_rate": 0.00017198599299649825, + "loss": 4.3629, + "step": 1406 + }, + { + "epoch": 0.583635797988178, + "grad_norm": 2.078125, + "learning_rate": 0.00017196598299149576, + "loss": 4.1442, + "step": 1407 + }, + { + "epoch": 0.5840506066576792, + "grad_norm": 1.8515625, + "learning_rate": 0.00017194597298649328, + "loss": 4.3643, + "step": 1408 + }, + { + "epoch": 0.5844654153271803, + "grad_norm": 2.15625, + "learning_rate": 0.00017192596298149076, + "loss": 4.1961, + "step": 1409 + }, + { + "epoch": 0.5848802239966815, + "grad_norm": 1.9765625, + "learning_rate": 0.00017190595297648825, + "loss": 4.3112, + "step": 1410 + }, + { + "epoch": 0.5852950326661828, + "grad_norm": 2.109375, + "learning_rate": 0.00017188594297148574, + "loss": 4.1584, + "step": 1411 + }, + { + "epoch": 0.5857098413356839, + "grad_norm": 1.8828125, + "learning_rate": 0.00017186593296648325, + "loss": 4.4323, + "step": 1412 + }, + { + "epoch": 0.5861246500051851, + "grad_norm": 2.15625, + "learning_rate": 0.00017184592296148074, + "loss": 4.3019, + "step": 1413 + }, + { + "epoch": 0.5865394586746863, + "grad_norm": 2.125, + "learning_rate": 0.00017182591295647825, + "loss": 4.46, + "step": 1414 + }, + { + "epoch": 0.5869542673441875, + "grad_norm": 1.9453125, + "learning_rate": 0.00017180590295147574, + "loss": 4.3739, + "step": 1415 + }, + { + "epoch": 0.5873690760136887, + "grad_norm": 1.8359375, + "learning_rate": 0.00017178589294647326, + "loss": 4.2291, + "step": 1416 + }, + { + "epoch": 0.5877838846831899, + "grad_norm": 1.890625, + "learning_rate": 0.00017176588294147074, + "loss": 4.1138, + "step": 1417 + }, + { + "epoch": 0.588198693352691, + "grad_norm": 1.6953125, + "learning_rate": 0.00017174587293646823, + "loss": 4.2786, + "step": 1418 + }, + { + "epoch": 0.5886135020221923, + "grad_norm": 2.109375, + "learning_rate": 0.00017172586293146572, + "loss": 4.4354, + "step": 1419 + }, + { + "epoch": 0.5890283106916935, + "grad_norm": 1.8671875, + "learning_rate": 0.00017170585292646323, + "loss": 4.1951, + "step": 1420 + }, + { + "epoch": 
0.5894431193611946, + "grad_norm": 1.8359375, + "learning_rate": 0.00017168584292146075, + "loss": 4.3176, + "step": 1421 + }, + { + "epoch": 0.5898579280306958, + "grad_norm": 1.9609375, + "learning_rate": 0.00017166583291645823, + "loss": 4.5763, + "step": 1422 + }, + { + "epoch": 0.5902727367001971, + "grad_norm": 1.9375, + "learning_rate": 0.00017164582291145575, + "loss": 4.2788, + "step": 1423 + }, + { + "epoch": 0.5906875453696983, + "grad_norm": 1.8828125, + "learning_rate": 0.00017162581290645324, + "loss": 4.3428, + "step": 1424 + }, + { + "epoch": 0.5911023540391994, + "grad_norm": 2.109375, + "learning_rate": 0.00017160580290145075, + "loss": 4.4421, + "step": 1425 + }, + { + "epoch": 0.5915171627087006, + "grad_norm": 1.8515625, + "learning_rate": 0.00017158579289644824, + "loss": 4.2542, + "step": 1426 + }, + { + "epoch": 0.5919319713782017, + "grad_norm": 1.7265625, + "learning_rate": 0.00017156578289144572, + "loss": 4.261, + "step": 1427 + }, + { + "epoch": 0.592346780047703, + "grad_norm": 2.265625, + "learning_rate": 0.0001715457728864432, + "loss": 4.2739, + "step": 1428 + }, + { + "epoch": 0.5927615887172042, + "grad_norm": 2.046875, + "learning_rate": 0.00017152576288144073, + "loss": 4.1583, + "step": 1429 + }, + { + "epoch": 0.5931763973867054, + "grad_norm": 1.8046875, + "learning_rate": 0.0001715057528764382, + "loss": 4.2409, + "step": 1430 + }, + { + "epoch": 0.5935912060562065, + "grad_norm": 2.234375, + "learning_rate": 0.00017148574287143573, + "loss": 4.3583, + "step": 1431 + }, + { + "epoch": 0.5940060147257078, + "grad_norm": 1.7265625, + "learning_rate": 0.00017146573286643322, + "loss": 4.0787, + "step": 1432 + }, + { + "epoch": 0.594420823395209, + "grad_norm": 1.9296875, + "learning_rate": 0.00017144572286143073, + "loss": 4.3969, + "step": 1433 + }, + { + "epoch": 0.5948356320647101, + "grad_norm": 2.28125, + "learning_rate": 0.00017142571285642824, + "loss": 4.4596, + "step": 1434 + }, + { + "epoch": 0.5952504407342113, + "grad_norm": 1.8984375, + "learning_rate": 0.00017140570285142573, + "loss": 4.4686, + "step": 1435 + }, + { + "epoch": 0.5956652494037126, + "grad_norm": 1.859375, + "learning_rate": 0.00017138569284642322, + "loss": 4.2836, + "step": 1436 + }, + { + "epoch": 0.5960800580732137, + "grad_norm": 2.53125, + "learning_rate": 0.0001713656828414207, + "loss": 4.5226, + "step": 1437 + }, + { + "epoch": 0.5964948667427149, + "grad_norm": 1.9609375, + "learning_rate": 0.00017134567283641822, + "loss": 4.6246, + "step": 1438 + }, + { + "epoch": 0.5969096754122161, + "grad_norm": 1.8046875, + "learning_rate": 0.0001713256628314157, + "loss": 4.3064, + "step": 1439 + }, + { + "epoch": 0.5973244840817173, + "grad_norm": 2.0625, + "learning_rate": 0.00017130565282641322, + "loss": 4.0792, + "step": 1440 + }, + { + "epoch": 0.5977392927512185, + "grad_norm": 1.90625, + "learning_rate": 0.0001712856428214107, + "loss": 4.2498, + "step": 1441 + }, + { + "epoch": 0.5981541014207197, + "grad_norm": 2.046875, + "learning_rate": 0.00017126563281640822, + "loss": 4.2767, + "step": 1442 + }, + { + "epoch": 0.5985689100902208, + "grad_norm": 1.921875, + "learning_rate": 0.0001712456228114057, + "loss": 4.2968, + "step": 1443 + }, + { + "epoch": 0.5989837187597221, + "grad_norm": 1.890625, + "learning_rate": 0.00017122561280640323, + "loss": 4.0411, + "step": 1444 + }, + { + "epoch": 0.5993985274292233, + "grad_norm": 1.84375, + "learning_rate": 0.0001712056028014007, + "loss": 4.0592, + "step": 1445 + }, + { + "epoch": 0.5998133360987244, + "grad_norm": 
1.8203125, + "learning_rate": 0.0001711855927963982, + "loss": 4.2207, + "step": 1446 + }, + { + "epoch": 0.6002281447682256, + "grad_norm": 1.9453125, + "learning_rate": 0.0001711655827913957, + "loss": 4.2818, + "step": 1447 + }, + { + "epoch": 0.6006429534377269, + "grad_norm": 2.234375, + "learning_rate": 0.0001711455727863932, + "loss": 4.4416, + "step": 1448 + }, + { + "epoch": 0.601057762107228, + "grad_norm": 2.015625, + "learning_rate": 0.00017112556278139072, + "loss": 4.2443, + "step": 1449 + }, + { + "epoch": 0.6014725707767292, + "grad_norm": 2.078125, + "learning_rate": 0.0001711055527763882, + "loss": 4.3454, + "step": 1450 + }, + { + "epoch": 0.6018873794462304, + "grad_norm": 2.078125, + "learning_rate": 0.00017108554277138572, + "loss": 4.1842, + "step": 1451 + }, + { + "epoch": 0.6023021881157317, + "grad_norm": 2.234375, + "learning_rate": 0.0001710655327663832, + "loss": 4.4242, + "step": 1452 + }, + { + "epoch": 0.6027169967852328, + "grad_norm": 2.046875, + "learning_rate": 0.0001710455227613807, + "loss": 4.5832, + "step": 1453 + }, + { + "epoch": 0.603131805454734, + "grad_norm": 2.21875, + "learning_rate": 0.00017102551275637818, + "loss": 4.2929, + "step": 1454 + }, + { + "epoch": 0.6035466141242352, + "grad_norm": 1.90625, + "learning_rate": 0.0001710055027513757, + "loss": 4.1691, + "step": 1455 + }, + { + "epoch": 0.6039614227937364, + "grad_norm": 1.9921875, + "learning_rate": 0.00017098549274637318, + "loss": 4.5907, + "step": 1456 + }, + { + "epoch": 0.6043762314632376, + "grad_norm": 2.4375, + "learning_rate": 0.0001709654827413707, + "loss": 4.3613, + "step": 1457 + }, + { + "epoch": 0.6047910401327388, + "grad_norm": 1.9375, + "learning_rate": 0.00017094547273636818, + "loss": 4.2897, + "step": 1458 + }, + { + "epoch": 0.6052058488022399, + "grad_norm": 2.03125, + "learning_rate": 0.0001709254627313657, + "loss": 4.2352, + "step": 1459 + }, + { + "epoch": 0.6056206574717412, + "grad_norm": 1.7734375, + "learning_rate": 0.00017090545272636318, + "loss": 4.3734, + "step": 1460 + }, + { + "epoch": 0.6060354661412424, + "grad_norm": 2.1875, + "learning_rate": 0.0001708854427213607, + "loss": 4.2973, + "step": 1461 + }, + { + "epoch": 0.6064502748107435, + "grad_norm": 1.8359375, + "learning_rate": 0.00017086543271635819, + "loss": 4.355, + "step": 1462 + }, + { + "epoch": 0.6068650834802447, + "grad_norm": 1.9453125, + "learning_rate": 0.00017084542271135567, + "loss": 4.4822, + "step": 1463 + }, + { + "epoch": 0.607279892149746, + "grad_norm": 1.890625, + "learning_rate": 0.0001708254127063532, + "loss": 4.1659, + "step": 1464 + }, + { + "epoch": 0.6076947008192471, + "grad_norm": 1.921875, + "learning_rate": 0.00017080540270135067, + "loss": 4.313, + "step": 1465 + }, + { + "epoch": 0.6081095094887483, + "grad_norm": 1.984375, + "learning_rate": 0.0001707853926963482, + "loss": 4.4229, + "step": 1466 + }, + { + "epoch": 0.6085243181582495, + "grad_norm": 1.921875, + "learning_rate": 0.00017076538269134568, + "loss": 4.1758, + "step": 1467 + }, + { + "epoch": 0.6089391268277508, + "grad_norm": 1.96875, + "learning_rate": 0.0001707453726863432, + "loss": 4.2771, + "step": 1468 + }, + { + "epoch": 0.6093539354972519, + "grad_norm": 2.15625, + "learning_rate": 0.00017072536268134068, + "loss": 4.3834, + "step": 1469 + }, + { + "epoch": 0.6097687441667531, + "grad_norm": 1.796875, + "learning_rate": 0.0001707053526763382, + "loss": 4.1566, + "step": 1470 + }, + { + "epoch": 0.6101835528362543, + "grad_norm": 1.859375, + "learning_rate": 0.00017068534267133568, 
+ "loss": 4.3964, + "step": 1471 + }, + { + "epoch": 0.6105983615057554, + "grad_norm": 1.9453125, + "learning_rate": 0.00017066533266633317, + "loss": 4.3178, + "step": 1472 + }, + { + "epoch": 0.6110131701752567, + "grad_norm": 1.96875, + "learning_rate": 0.00017064532266133065, + "loss": 4.335, + "step": 1473 + }, + { + "epoch": 0.6114279788447579, + "grad_norm": 1.9921875, + "learning_rate": 0.00017062531265632817, + "loss": 4.32, + "step": 1474 + }, + { + "epoch": 0.611842787514259, + "grad_norm": 1.875, + "learning_rate": 0.00017060530265132566, + "loss": 4.4379, + "step": 1475 + }, + { + "epoch": 0.6122575961837602, + "grad_norm": 2.078125, + "learning_rate": 0.00017058529264632317, + "loss": 4.3591, + "step": 1476 + }, + { + "epoch": 0.6126724048532615, + "grad_norm": 2.109375, + "learning_rate": 0.00017056528264132068, + "loss": 4.2203, + "step": 1477 + }, + { + "epoch": 0.6130872135227626, + "grad_norm": 2.015625, + "learning_rate": 0.00017054527263631817, + "loss": 4.5472, + "step": 1478 + }, + { + "epoch": 0.6135020221922638, + "grad_norm": 2.109375, + "learning_rate": 0.0001705252626313157, + "loss": 4.2897, + "step": 1479 + }, + { + "epoch": 0.613916830861765, + "grad_norm": 2.03125, + "learning_rate": 0.00017050525262631317, + "loss": 4.3161, + "step": 1480 + }, + { + "epoch": 0.6143316395312662, + "grad_norm": 2.1875, + "learning_rate": 0.00017048524262131066, + "loss": 4.2874, + "step": 1481 + }, + { + "epoch": 0.6147464482007674, + "grad_norm": 1.9296875, + "learning_rate": 0.00017046523261630815, + "loss": 4.4308, + "step": 1482 + }, + { + "epoch": 0.6151612568702686, + "grad_norm": 1.953125, + "learning_rate": 0.00017044522261130566, + "loss": 4.3472, + "step": 1483 + }, + { + "epoch": 0.6155760655397697, + "grad_norm": 1.828125, + "learning_rate": 0.00017042521260630315, + "loss": 4.1987, + "step": 1484 + }, + { + "epoch": 0.615990874209271, + "grad_norm": 2.078125, + "learning_rate": 0.00017040520260130066, + "loss": 4.4538, + "step": 1485 + }, + { + "epoch": 0.6164056828787722, + "grad_norm": 1.90625, + "learning_rate": 0.00017038519259629815, + "loss": 4.1197, + "step": 1486 + }, + { + "epoch": 0.6168204915482733, + "grad_norm": 2.171875, + "learning_rate": 0.00017036518259129567, + "loss": 4.5562, + "step": 1487 + }, + { + "epoch": 0.6172353002177745, + "grad_norm": 1.875, + "learning_rate": 0.00017034517258629315, + "loss": 4.0696, + "step": 1488 + }, + { + "epoch": 0.6176501088872758, + "grad_norm": 1.7421875, + "learning_rate": 0.00017032516258129064, + "loss": 4.1875, + "step": 1489 + }, + { + "epoch": 0.618064917556777, + "grad_norm": 1.8828125, + "learning_rate": 0.00017030515257628816, + "loss": 4.1146, + "step": 1490 + }, + { + "epoch": 0.6184797262262781, + "grad_norm": 1.8828125, + "learning_rate": 0.00017028514257128564, + "loss": 4.2917, + "step": 1491 + }, + { + "epoch": 0.6188945348957793, + "grad_norm": 1.8203125, + "learning_rate": 0.00017026513256628316, + "loss": 4.3181, + "step": 1492 + }, + { + "epoch": 0.6193093435652806, + "grad_norm": 1.921875, + "learning_rate": 0.00017024512256128064, + "loss": 4.4676, + "step": 1493 + }, + { + "epoch": 0.6197241522347817, + "grad_norm": 1.953125, + "learning_rate": 0.00017022511255627816, + "loss": 4.1105, + "step": 1494 + }, + { + "epoch": 0.6201389609042829, + "grad_norm": 1.9609375, + "learning_rate": 0.00017020510255127565, + "loss": 4.2254, + "step": 1495 + }, + { + "epoch": 0.620553769573784, + "grad_norm": 1.7578125, + "learning_rate": 0.00017018509254627316, + "loss": 4.242, + "step": 1496 + }, + { 
+ "epoch": 0.6209685782432853, + "grad_norm": 1.84375, + "learning_rate": 0.00017016508254127065, + "loss": 4.3203, + "step": 1497 + }, + { + "epoch": 0.6213833869127865, + "grad_norm": 1.890625, + "learning_rate": 0.00017014507253626813, + "loss": 4.2968, + "step": 1498 + }, + { + "epoch": 0.6217981955822877, + "grad_norm": 1.9375, + "learning_rate": 0.00017012506253126562, + "loss": 4.5162, + "step": 1499 + }, + { + "epoch": 0.6222130042517888, + "grad_norm": 2.03125, + "learning_rate": 0.00017010505252626314, + "loss": 4.5584, + "step": 1500 + } + ], + "logging_steps": 1, + "max_steps": 10000, + "num_input_tokens_seen": 0, + "num_train_epochs": 5, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4.976936553920717e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}