{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.842105263157895,
  "eval_steps": 500,
  "global_step": 140,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03508771929824561,
      "grad_norm": 2.0672700448576804,
      "learning_rate": 0.0,
      "loss": 0.866,
      "step": 1
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 2.2902360202645,
      "learning_rate": 7.142857142857143e-07,
      "loss": 1.043,
      "step": 2
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 2.064587564315747,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 0.9479,
      "step": 3
    },
    {
      "epoch": 0.14035087719298245,
      "grad_norm": 2.1456251662755625,
      "learning_rate": 2.1428571428571427e-06,
      "loss": 1.0053,
      "step": 4
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 2.118290312395124,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 1.0228,
      "step": 5
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 1.9727110398188104,
      "learning_rate": 3.5714285714285718e-06,
      "loss": 0.9994,
      "step": 6
    },
    {
      "epoch": 0.24561403508771928,
      "grad_norm": 1.811780274017235,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 0.9832,
      "step": 7
    },
    {
      "epoch": 0.2807017543859649,
      "grad_norm": 1.60280335465616,
      "learning_rate": 5e-06,
      "loss": 1.0057,
      "step": 8
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 1.4946232465242921,
      "learning_rate": 4.999302593030069e-06,
      "loss": 1.0198,
      "step": 9
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 1.219848341043154,
      "learning_rate": 4.99721076122146e-06,
      "loss": 0.9945,
      "step": 10
    },
    {
      "epoch": 0.38596491228070173,
      "grad_norm": 1.0507879904789832,
      "learning_rate": 4.99372567166064e-06,
      "loss": 0.8896,
      "step": 11
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 1.2565080469257828,
      "learning_rate": 4.98884926876821e-06,
      "loss": 0.9066,
      "step": 12
    },
    {
      "epoch": 0.45614035087719296,
      "grad_norm": 1.3416635061508715,
      "learning_rate": 4.982584273214061e-06,
      "loss": 0.973,
      "step": 13
    },
    {
      "epoch": 0.49122807017543857,
      "grad_norm": 1.1635476799198454,
      "learning_rate": 4.974934180399447e-06,
      "loss": 0.8802,
      "step": 14
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 1.2273198837996009,
      "learning_rate": 4.965903258506806e-06,
      "loss": 0.9058,
      "step": 15
    },
    {
      "epoch": 0.5614035087719298,
      "grad_norm": 0.9397916351800223,
      "learning_rate": 4.955496546118439e-06,
      "loss": 0.851,
      "step": 16
    },
    {
      "epoch": 0.5964912280701754,
      "grad_norm": 0.8970172914695361,
      "learning_rate": 4.943719849405347e-06,
      "loss": 0.959,
      "step": 17
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.8557735235888474,
      "learning_rate": 4.930579738887827e-06,
      "loss": 0.8591,
      "step": 18
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.8228693160594547,
      "learning_rate": 4.9160835457696075e-06,
      "loss": 0.9227,
      "step": 19
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 0.9746558682735013,
      "learning_rate": 4.900239357847582e-06,
      "loss": 0.9185,
      "step": 20
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 0.783405238430667,
      "learning_rate": 4.883056014999423e-06,
      "loss": 0.8853,
      "step": 21
    },
    {
      "epoch": 0.7719298245614035,
      "grad_norm": 1.1560714233050355,
      "learning_rate": 4.864543104251587e-06,
      "loss": 0.837,
      "step": 22
    },
    {
      "epoch": 0.8070175438596491,
      "grad_norm": 0.6779891999963303,
      "learning_rate": 4.844710954430464e-06,
      "loss": 0.9006,
      "step": 23
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.6140368603132732,
      "learning_rate": 4.823570630399665e-06,
      "loss": 0.8713,
      "step": 24
    },
    {
      "epoch": 0.8771929824561403,
      "grad_norm": 0.5959623784138675,
      "learning_rate": 4.8011339268866505e-06,
      "loss": 0.8573,
      "step": 25
    },
    {
      "epoch": 0.9122807017543859,
      "grad_norm": 0.6645642684704514,
      "learning_rate": 4.777413361902152e-06,
      "loss": 0.9244,
      "step": 26
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 0.5749254662708289,
      "learning_rate": 4.752422169756048e-06,
      "loss": 0.8214,
      "step": 27
    },
    {
      "epoch": 0.9824561403508771,
      "grad_norm": 0.49471199829315,
      "learning_rate": 4.726174293673612e-06,
      "loss": 0.8501,
      "step": 28
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.49471199829315,
      "learning_rate": 4.698684378016223e-06,
      "loss": 0.7761,
      "step": 29
    },
    {
      "epoch": 1.0350877192982457,
      "grad_norm": 0.8408225702174914,
      "learning_rate": 4.669967760110908e-06,
      "loss": 0.8702,
      "step": 30
    },
    {
      "epoch": 1.0701754385964912,
      "grad_norm": 0.5019249070368396,
      "learning_rate": 4.6400404616932505e-06,
      "loss": 0.8376,
      "step": 31
    },
    {
      "epoch": 1.1052631578947367,
      "grad_norm": 0.4303555090359822,
      "learning_rate": 4.608919179968457e-06,
      "loss": 0.8834,
      "step": 32
    },
    {
      "epoch": 1.1403508771929824,
      "grad_norm": 0.446333451933402,
      "learning_rate": 4.576621278295558e-06,
      "loss": 0.8631,
      "step": 33
    },
    {
      "epoch": 1.1754385964912282,
      "grad_norm": 0.4783577116779664,
      "learning_rate": 4.543164776499945e-06,
      "loss": 0.8283,
      "step": 34
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 0.4079076293014929,
      "learning_rate": 4.508568340819654e-06,
      "loss": 0.8868,
      "step": 35
    },
    {
      "epoch": 1.2456140350877192,
      "grad_norm": 0.40536005006600634,
      "learning_rate": 4.472851273490985e-06,
      "loss": 0.8674,
      "step": 36
    },
    {
      "epoch": 1.280701754385965,
      "grad_norm": 0.4152500781803562,
      "learning_rate": 4.436033501979299e-06,
      "loss": 0.859,
      "step": 37
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 0.4347010646426558,
      "learning_rate": 4.398135567860972e-06,
      "loss": 0.8254,
      "step": 38
    },
    {
      "epoch": 1.3508771929824561,
      "grad_norm": 0.3788287405374111,
      "learning_rate": 4.359178615362725e-06,
      "loss": 0.7829,
      "step": 39
    },
    {
      "epoch": 1.3859649122807016,
      "grad_norm": 0.4670693473167291,
      "learning_rate": 4.319184379564716e-06,
      "loss": 0.8918,
      "step": 40
    },
    {
      "epoch": 1.4210526315789473,
      "grad_norm": 0.47835984972537143,
      "learning_rate": 4.278175174273989e-06,
      "loss": 0.8179,
      "step": 41
    },
    {
      "epoch": 1.456140350877193,
      "grad_norm": 0.4499444593333873,
      "learning_rate": 4.236173879575022e-06,
      "loss": 0.7871,
      "step": 42
    },
    {
      "epoch": 1.4912280701754386,
      "grad_norm": 0.3878622142611574,
      "learning_rate": 4.1932039290643534e-06,
      "loss": 0.7789,
      "step": 43
    },
    {
      "epoch": 1.526315789473684,
      "grad_norm": 0.3316720314372251,
      "learning_rate": 4.149289296776369e-06,
      "loss": 0.91,
      "step": 44
    },
    {
      "epoch": 1.5614035087719298,
      "grad_norm": 0.36941265676632506,
      "learning_rate": 4.104454483807579e-06,
      "loss": 0.8607,
      "step": 45
    },
    {
      "epoch": 1.5964912280701755,
      "grad_norm": 0.3620626145776103,
      "learning_rate": 4.058724504646834e-06,
      "loss": 0.7937,
      "step": 46
    },
    {
      "epoch": 1.631578947368421,
      "grad_norm": 0.3338291240702826,
      "learning_rate": 4.012124873219094e-06,
      "loss": 0.7815,
      "step": 47
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.472336322290524,
      "learning_rate": 3.964681588650562e-06,
      "loss": 0.8394,
      "step": 48
    },
    {
      "epoch": 1.7017543859649122,
      "grad_norm": 0.3420578655212163,
      "learning_rate": 3.916421120763106e-06,
      "loss": 0.8502,
      "step": 49
    },
    {
      "epoch": 1.736842105263158,
      "grad_norm": 0.38000589787590366,
      "learning_rate": 3.8673703953060685e-06,
      "loss": 0.8347,
      "step": 50
    },
    {
      "epoch": 1.7719298245614035,
      "grad_norm": 0.3654714309302888,
      "learning_rate": 3.817556778933697e-06,
      "loss": 0.8581,
      "step": 51
    },
    {
      "epoch": 1.807017543859649,
      "grad_norm": 0.3918540158095545,
      "learning_rate": 3.7670080639366e-06,
      "loss": 0.8769,
      "step": 52
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 0.35070825923075866,
      "learning_rate": 3.7157524527357036e-06,
      "loss": 0.7163,
      "step": 53
    },
    {
      "epoch": 1.8771929824561404,
      "grad_norm": 0.35412184971484173,
      "learning_rate": 3.663818542147409e-06,
      "loss": 0.8756,
      "step": 54
    },
    {
      "epoch": 1.912280701754386,
      "grad_norm": 0.3580769918085387,
      "learning_rate": 3.61123530742869e-06,
      "loss": 0.8221,
      "step": 55
    },
    {
      "epoch": 1.9473684210526314,
      "grad_norm": 0.33901277879515357,
      "learning_rate": 3.5580320861110627e-06,
      "loss": 0.8703,
      "step": 56
    },
    {
      "epoch": 1.9824561403508771,
      "grad_norm": 0.3463865269661244,
      "learning_rate": 3.5042385616324243e-06,
      "loss": 0.8721,
      "step": 57
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.5122368904802833,
      "learning_rate": 3.4498847467759e-06,
      "loss": 0.8607,
      "step": 58
    },
    {
      "epoch": 2.0350877192982457,
      "grad_norm": 0.32344238659596863,
      "learning_rate": 3.3950009669249502e-06,
      "loss": 0.7811,
      "step": 59
    },
    {
      "epoch": 2.0701754385964914,
      "grad_norm": 0.35108823806290596,
      "learning_rate": 3.3396178431440572e-06,
      "loss": 0.9217,
      "step": 60
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 0.30984522709441403,
      "learning_rate": 3.283766275094454e-06,
      "loss": 0.7985,
      "step": 61
    },
    {
      "epoch": 2.1403508771929824,
      "grad_norm": 0.34747230260102224,
      "learning_rate": 3.227477423794412e-06,
      "loss": 0.7911,
      "step": 62
    },
    {
      "epoch": 2.175438596491228,
      "grad_norm": 0.3139893028606255,
      "learning_rate": 3.1707826942337124e-06,
      "loss": 0.8368,
      "step": 63
    },
    {
      "epoch": 2.2105263157894735,
      "grad_norm": 0.32074434133002094,
      "learning_rate": 3.1137137178519983e-06,
      "loss": 0.8602,
      "step": 64
    },
    {
      "epoch": 2.245614035087719,
      "grad_norm": 0.33796734180843035,
      "learning_rate": 3.056302334890786e-06,
      "loss": 0.7477,
      "step": 65
    },
    {
      "epoch": 2.280701754385965,
      "grad_norm": 0.308432104905017,
      "learning_rate": 2.9985805766289815e-06,
      "loss": 0.808,
      "step": 66
    },
    {
      "epoch": 2.3157894736842106,
      "grad_norm": 0.3123660916517458,
      "learning_rate": 2.940580647511805e-06,
      "loss": 0.7912,
      "step": 67
    },
    {
      "epoch": 2.3508771929824563,
      "grad_norm": 0.3406699289606851,
      "learning_rate": 2.8823349071831154e-06,
      "loss": 0.857,
      "step": 68
    },
    {
      "epoch": 2.3859649122807016,
      "grad_norm": 0.3063945394190306,
      "learning_rate": 2.8238758524311316e-06,
      "loss": 0.7817,
      "step": 69
    },
    {
      "epoch": 2.4210526315789473,
      "grad_norm": 0.31851738409569996,
      "learning_rate": 2.7652360990576457e-06,
      "loss": 0.9036,
      "step": 70
    },
    {
      "epoch": 2.456140350877193,
      "grad_norm": 0.3145120103359264,
      "learning_rate": 2.7064483636808314e-06,
      "loss": 0.8149,
      "step": 71
    },
    {
      "epoch": 2.4912280701754383,
      "grad_norm": 0.3050076260239972,
      "learning_rate": 2.6475454454818072e-06,
      "loss": 0.8372,
      "step": 72
    },
    {
      "epoch": 2.526315789473684,
      "grad_norm": 0.30681790274996473,
      "learning_rate": 2.5885602079051354e-06,
      "loss": 0.7604,
      "step": 73
    },
    {
      "epoch": 2.56140350877193,
      "grad_norm": 0.314110412232156,
      "learning_rate": 2.529525560323462e-06,
      "loss": 0.8005,
      "step": 74
    },
    {
      "epoch": 2.5964912280701755,
      "grad_norm": 0.3236785830234266,
      "learning_rate": 2.470474439676539e-06,
      "loss": 0.8168,
      "step": 75
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 0.359656431397916,
      "learning_rate": 2.411439792094866e-06,
      "loss": 0.8402,
      "step": 76
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.32366656133415905,
      "learning_rate": 2.3524545545181936e-06,
      "loss": 0.8668,
      "step": 77
    },
    {
      "epoch": 2.7017543859649122,
      "grad_norm": 0.3262664895219039,
      "learning_rate": 2.2935516363191695e-06,
      "loss": 0.8343,
      "step": 78
    },
    {
      "epoch": 2.736842105263158,
      "grad_norm": 0.3081673146324169,
      "learning_rate": 2.234763900942355e-06,
      "loss": 0.8084,
      "step": 79
    },
    {
      "epoch": 2.7719298245614032,
      "grad_norm": 0.32923845454809364,
      "learning_rate": 2.1761241475688697e-06,
      "loss": 0.7564,
      "step": 80
    },
    {
      "epoch": 2.807017543859649,
      "grad_norm": 0.3103638917941097,
      "learning_rate": 2.117665092816885e-06,
      "loss": 0.7423,
      "step": 81
    },
    {
      "epoch": 2.8421052631578947,
      "grad_norm": 0.32527138845292275,
      "learning_rate": 2.059419352488196e-06,
      "loss": 0.8292,
      "step": 82
    },
    {
      "epoch": 2.8771929824561404,
      "grad_norm": 0.31114889374241994,
      "learning_rate": 2.0014194233710193e-06,
      "loss": 0.8229,
      "step": 83
    },
    {
      "epoch": 2.912280701754386,
      "grad_norm": 0.3119716096545865,
      "learning_rate": 1.9436976651092143e-06,
      "loss": 0.829,
      "step": 84
    },
    {
      "epoch": 2.9473684210526314,
      "grad_norm": 0.31910821914661397,
      "learning_rate": 1.8862862821480023e-06,
      "loss": 0.7703,
      "step": 85
    },
    {
      "epoch": 2.982456140350877,
      "grad_norm": 0.31035281270338383,
      "learning_rate": 1.829217305766289e-06,
      "loss": 0.8182,
      "step": 86
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.31035281270338383,
      "learning_rate": 1.772522576205589e-06,
      "loss": 0.6834,
      "step": 87
    },
    {
      "epoch": 3.0350877192982457,
      "grad_norm": 0.6381501820358972,
      "learning_rate": 1.7162337249055478e-06,
      "loss": 0.7985,
      "step": 88
    },
    {
      "epoch": 3.0701754385964914,
      "grad_norm": 0.34108098057965985,
      "learning_rate": 1.6603821568559436e-06,
      "loss": 0.8813,
      "step": 89
    },
    {
      "epoch": 3.1052631578947367,
      "grad_norm": 0.3119218127003693,
      "learning_rate": 1.6049990330750508e-06,
      "loss": 0.8332,
      "step": 90
    },
    {
      "epoch": 3.1403508771929824,
      "grad_norm": 0.31753762794893836,
      "learning_rate": 1.5501152532241003e-06,
      "loss": 0.8085,
      "step": 91
    },
    {
      "epoch": 3.175438596491228,
      "grad_norm": 0.26210667369890633,
      "learning_rate": 1.495761438367577e-06,
      "loss": 0.7937,
      "step": 92
    },
    {
      "epoch": 3.2105263157894735,
      "grad_norm": 0.3412780118816683,
      "learning_rate": 1.4419679138889379e-06,
      "loss": 0.9124,
      "step": 93
    },
    {
      "epoch": 3.245614035087719,
      "grad_norm": 0.3273535475843529,
      "learning_rate": 1.3887646925713116e-06,
      "loss": 0.771,
      "step": 94
    },
    {
      "epoch": 3.280701754385965,
      "grad_norm": 0.301251838670515,
      "learning_rate": 1.3361814578525922e-06,
      "loss": 0.7657,
      "step": 95
    },
    {
      "epoch": 3.3157894736842106,
      "grad_norm": 0.362998311924545,
      "learning_rate": 1.2842475472642969e-06,
      "loss": 0.8489,
      "step": 96
    },
    {
      "epoch": 3.3508771929824563,
      "grad_norm": 0.29122608306651043,
      "learning_rate": 1.2329919360634003e-06,
      "loss": 0.8135,
      "step": 97
    },
    {
      "epoch": 3.3859649122807016,
      "grad_norm": 0.29475940071935713,
      "learning_rate": 1.182443221066303e-06,
      "loss": 0.7059,
      "step": 98
    },
    {
      "epoch": 3.4210526315789473,
      "grad_norm": 0.28973717877011734,
      "learning_rate": 1.1326296046939334e-06,
      "loss": 0.7342,
      "step": 99
    },
    {
      "epoch": 3.456140350877193,
      "grad_norm": 0.30995625777137437,
      "learning_rate": 1.083578879236895e-06,
      "loss": 0.7411,
      "step": 100
    },
    {
      "epoch": 3.4912280701754383,
      "grad_norm": 0.2859432013165172,
      "learning_rate": 1.0353184113494386e-06,
      "loss": 0.772,
      "step": 101
    },
    {
      "epoch": 3.526315789473684,
      "grad_norm": 0.30700142822195303,
      "learning_rate": 9.878751267809069e-07,
      "loss": 0.7557,
      "step": 102
    },
    {
      "epoch": 3.56140350877193,
      "grad_norm": 0.27176920609301375,
      "learning_rate": 9.412754953531664e-07,
      "loss": 0.7729,
      "step": 103
    },
    {
      "epoch": 3.5964912280701755,
      "grad_norm": 0.36537897752504017,
      "learning_rate": 8.955455161924217e-07,
      "loss": 0.7803,
      "step": 104
    },
    {
      "epoch": 3.6315789473684212,
      "grad_norm": 0.32335411827201516,
      "learning_rate": 8.507107032236323e-07,
      "loss": 0.6832,
      "step": 105
    },
    {
      "epoch": 3.6666666666666665,
      "grad_norm": 0.31630275273581865,
      "learning_rate": 8.067960709356479e-07,
      "loss": 0.7744,
      "step": 106
    },
    {
      "epoch": 3.7017543859649122,
      "grad_norm": 0.3116184912362651,
      "learning_rate": 7.638261204249783e-07,
      "loss": 0.7719,
      "step": 107
    },
    {
      "epoch": 3.736842105263158,
      "grad_norm": 0.31857809248659025,
      "learning_rate": 7.218248257260127e-07,
      "loss": 0.8335,
      "step": 108
    },
    {
      "epoch": 3.7719298245614032,
      "grad_norm": 0.28756539469934045,
      "learning_rate": 6.808156204352845e-07,
      "loss": 0.857,
      "step": 109
    },
    {
      "epoch": 3.807017543859649,
      "grad_norm": 0.30352272859730434,
      "learning_rate": 6.40821384637276e-07,
      "loss": 0.8382,
      "step": 110
    },
    {
      "epoch": 3.8421052631578947,
      "grad_norm": 0.31531467423763393,
      "learning_rate": 6.018644321390288e-07,
      "loss": 0.823,
      "step": 111
    },
    {
      "epoch": 3.8771929824561404,
      "grad_norm": 0.2935691411718206,
      "learning_rate": 5.639664980207024e-07,
      "loss": 0.7658,
      "step": 112
    },
    {
      "epoch": 3.912280701754386,
      "grad_norm": 0.3148692436878785,
      "learning_rate": 5.271487265090163e-07,
      "loss": 0.7759,
      "step": 113
    },
    {
      "epoch": 3.9473684210526314,
      "grad_norm": 0.28756831417145856,
      "learning_rate": 4.914316591803475e-07,
      "loss": 0.7931,
      "step": 114
    },
    {
      "epoch": 3.982456140350877,
      "grad_norm": 0.2919237741849252,
      "learning_rate": 4.5683522350005505e-07,
      "loss": 0.81,
      "step": 115
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.4951765857412171,
      "learning_rate": 4.23378721704443e-07,
      "loss": 0.7612,
      "step": 116
    },
    {
      "epoch": 4.035087719298246,
      "grad_norm": 0.3170121765126596,
      "learning_rate": 3.910808200315433e-07,
      "loss": 0.768,
      "step": 117
    },
    {
      "epoch": 4.0701754385964914,
      "grad_norm": 0.31408721468965906,
      "learning_rate": 3.5995953830675004e-07,
      "loss": 0.7892,
      "step": 118
    },
    {
      "epoch": 4.105263157894737,
      "grad_norm": 0.3109955858522752,
      "learning_rate": 3.3003223988909234e-07,
      "loss": 0.7996,
      "step": 119
    },
    {
      "epoch": 4.140350877192983,
      "grad_norm": 0.2871221309729222,
      "learning_rate": 3.0131562198377763e-07,
      "loss": 0.7783,
      "step": 120
    },
    {
      "epoch": 4.175438596491228,
      "grad_norm": 0.30722503094595294,
      "learning_rate": 2.7382570632638853e-07,
      "loss": 0.7592,
      "step": 121
    },
    {
      "epoch": 4.2105263157894735,
      "grad_norm": 0.3009611047182384,
      "learning_rate": 2.4757783024395244e-07,
      "loss": 0.7143,
      "step": 122
    },
    {
      "epoch": 4.245614035087719,
      "grad_norm": 0.27752844097535945,
      "learning_rate": 2.2258663809784892e-07,
      "loss": 0.7798,
      "step": 123
    },
    {
      "epoch": 4.280701754385965,
      "grad_norm": 0.3189621174964373,
      "learning_rate": 1.9886607311334987e-07,
      "loss": 0.8131,
      "step": 124
    },
    {
      "epoch": 4.315789473684211,
      "grad_norm": 0.2877000579766296,
      "learning_rate": 1.764293696003358e-07,
      "loss": 0.8553,
      "step": 125
    },
    {
      "epoch": 4.350877192982456,
      "grad_norm": 0.31793864581348175,
      "learning_rate": 1.552890455695369e-07,
      "loss": 0.787,
      "step": 126
    },
    {
      "epoch": 4.385964912280702,
      "grad_norm": 0.31801352887161766,
      "learning_rate": 1.3545689574841341e-07,
      "loss": 0.796,
      "step": 127
    },
    {
      "epoch": 4.421052631578947,
      "grad_norm": 0.29288292733940474,
      "learning_rate": 1.1694398500057714e-07,
      "loss": 0.8024,
      "step": 128
    },
    {
      "epoch": 4.456140350877193,
      "grad_norm": 0.2758876534828929,
      "learning_rate": 9.976064215241859e-08,
      "loss": 0.8047,
      "step": 129
    },
    {
      "epoch": 4.491228070175438,
      "grad_norm": 0.285005356287463,
      "learning_rate": 8.391645423039357e-08,
      "loss": 0.8661,
      "step": 130
    },
    {
      "epoch": 4.526315789473684,
      "grad_norm": 0.2944312916380034,
      "learning_rate": 6.94202611121736e-08,
      "loss": 0.7682,
      "step": 131
    },
    {
      "epoch": 4.56140350877193,
      "grad_norm": 0.28730681584700213,
      "learning_rate": 5.628015059465364e-08,
      "loss": 0.7931,
      "step": 132
    },
    {
      "epoch": 4.5964912280701755,
      "grad_norm": 0.30107118420956586,
      "learning_rate": 4.450345388156141e-08,
      "loss": 0.6879,
      "step": 133
    },
    {
      "epoch": 4.631578947368421,
      "grad_norm": 0.2889397509518265,
      "learning_rate": 3.4096741493194196e-08,
      "loss": 0.7043,
      "step": 134
    },
    {
      "epoch": 4.666666666666667,
      "grad_norm": 0.2928561207653839,
      "learning_rate": 2.506581960055432e-08,
      "loss": 0.7708,
      "step": 135
    },
    {
      "epoch": 4.701754385964913,
      "grad_norm": 0.32712132619376316,
      "learning_rate": 1.7415726785939836e-08,
      "loss": 0.7557,
      "step": 136
    },
    {
      "epoch": 4.7368421052631575,
      "grad_norm": 0.3032894032753465,
      "learning_rate": 1.115073123179128e-08,
      "loss": 0.804,
      "step": 137
    },
    {
      "epoch": 4.771929824561403,
      "grad_norm": 0.2684794744258281,
      "learning_rate": 6.274328339360702e-09,
      "loss": 0.8301,
      "step": 138
    },
    {
      "epoch": 4.807017543859649,
      "grad_norm": 0.3086723531529243,
      "learning_rate": 2.789238778540537e-09,
      "loss": 0.7836,
      "step": 139
    },
    {
      "epoch": 4.842105263157895,
      "grad_norm": 0.288846758812861,
      "learning_rate": 6.974069699314246e-10,
      "loss": 0.7588,
      "step": 140
    }
  ],
  "logging_steps": 1,
  "max_steps": 140,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 63037458677760.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}