|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 6014, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.08855848014354706, |
|
"learning_rate": 0.0004999991472481453, |
|
"loss": 2.6557, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.07915773242712021, |
|
"learning_rate": 0.0004999965889983988, |
|
"loss": 2.4772, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.12245654314756393, |
|
"learning_rate": 0.0004999923252682129, |
|
"loss": 2.4706, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.10732001066207886, |
|
"learning_rate": 0.0004999863560866746, |
|
"loss": 2.205, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.13363118469715118, |
|
"learning_rate": 0.0004999786814945059, |
|
"loss": 2.3767, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.09244401752948761, |
|
"learning_rate": 0.0004999693015440632, |
|
"loss": 2.3077, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0993221327662468, |
|
"learning_rate": 0.0004999582162993363, |
|
"loss": 2.3961, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.09801439195871353, |
|
"learning_rate": 0.0004999454258359492, |
|
"loss": 2.303, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.1130354180932045, |
|
"learning_rate": 0.0004999309302411583, |
|
"loss": 2.3285, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.15056972205638885, |
|
"learning_rate": 0.0004999147296138531, |
|
"loss": 2.4721, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.13582268357276917, |
|
"learning_rate": 0.0004998968240645544, |
|
"loss": 2.4008, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.13640083372592926, |
|
"learning_rate": 0.0004998772137154141, |
|
"loss": 2.3995, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.12711608409881592, |
|
"learning_rate": 0.0004998558987002143, |
|
"loss": 2.356, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.19234400987625122, |
|
"learning_rate": 0.0004998328791643663, |
|
"loss": 2.2295, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.1442757099866867, |
|
"learning_rate": 0.0004998081552649098, |
|
"loss": 2.4544, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.18835151195526123, |
|
"learning_rate": 0.0004997817271705116, |
|
"loss": 2.3927, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.13745005428791046, |
|
"learning_rate": 0.0004997535950614643, |
|
"loss": 2.3474, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.1309432089328766, |
|
"learning_rate": 0.0004997237591296861, |
|
"loss": 2.3056, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.1625588983297348, |
|
"learning_rate": 0.0004996922195787177, |
|
"loss": 2.4774, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.1700213998556137, |
|
"learning_rate": 0.0004996589766237225, |
|
"loss": 2.3015, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.15182705223560333, |
|
"learning_rate": 0.0004996240304914846, |
|
"loss": 2.4763, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.15341205894947052, |
|
"learning_rate": 0.0004995873814204069, |
|
"loss": 2.3662, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.14351683855056763, |
|
"learning_rate": 0.0004995490296605098, |
|
"loss": 1.967, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.19977103173732758, |
|
"learning_rate": 0.0004995089754734297, |
|
"loss": 2.4405, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.16674907505512238, |
|
"learning_rate": 0.0004994672191324169, |
|
"loss": 2.3259, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.1730051189661026, |
|
"learning_rate": 0.0004994237609223337, |
|
"loss": 2.457, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.22182777523994446, |
|
"learning_rate": 0.0004993786011396528, |
|
"loss": 2.4724, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.1570582240819931, |
|
"learning_rate": 0.0004993317400924547, |
|
"loss": 2.3075, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.14569781720638275, |
|
"learning_rate": 0.0004992831781004262, |
|
"loss": 2.1955, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.19999592006206512, |
|
"learning_rate": 0.000499232915494858, |
|
"loss": 2.4308, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.20882728695869446, |
|
"learning_rate": 0.0004991809526186424, |
|
"loss": 2.2685, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.15529118478298187, |
|
"learning_rate": 0.0004991272898262708, |
|
"loss": 2.4197, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.20917291939258575, |
|
"learning_rate": 0.0004990719274838315, |
|
"loss": 2.111, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.16249670088291168, |
|
"learning_rate": 0.0004990148659690073, |
|
"loss": 2.1891, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.18368925154209137, |
|
"learning_rate": 0.0004989561056710729, |
|
"loss": 2.445, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.21063920855522156, |
|
"learning_rate": 0.0004988956469908916, |
|
"loss": 2.2818, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.16046880185604095, |
|
"learning_rate": 0.0004988334903409137, |
|
"loss": 2.3213, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.1568533182144165, |
|
"learning_rate": 0.0004987696361451725, |
|
"loss": 2.2785, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2682538330554962, |
|
"learning_rate": 0.0004987040848392824, |
|
"loss": 2.3157, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2094998061656952, |
|
"learning_rate": 0.0004986368368704355, |
|
"loss": 2.4451, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.17797984182834625, |
|
"learning_rate": 0.0004985678926973982, |
|
"loss": 2.4541, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.1685200035572052, |
|
"learning_rate": 0.000498497252790509, |
|
"loss": 2.5848, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.18856477737426758, |
|
"learning_rate": 0.0004984249176316741, |
|
"loss": 2.2977, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1670752465724945, |
|
"learning_rate": 0.0004983508877143651, |
|
"loss": 2.2476, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.20539650321006775, |
|
"learning_rate": 0.0004982751635436152, |
|
"loss": 2.4784, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.19674716889858246, |
|
"learning_rate": 0.000498197745636016, |
|
"loss": 2.3826, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1808764785528183, |
|
"learning_rate": 0.0004981186345197133, |
|
"loss": 2.1915, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1649690568447113, |
|
"learning_rate": 0.0004980378307344044, |
|
"loss": 2.3101, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.19910891354084015, |
|
"learning_rate": 0.0004979553348313341, |
|
"loss": 2.3597, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.17429497838020325, |
|
"learning_rate": 0.0004978711473732906, |
|
"loss": 2.4553, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.17228329181671143, |
|
"learning_rate": 0.000497785268934602, |
|
"loss": 2.4496, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.18831633031368256, |
|
"learning_rate": 0.0004976977001011321, |
|
"loss": 2.2841, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.1668764352798462, |
|
"learning_rate": 0.000497608441470277, |
|
"loss": 2.5694, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.17521929740905762, |
|
"learning_rate": 0.0004975174936509602, |
|
"loss": 2.3644, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1602141261100769, |
|
"learning_rate": 0.0004974248572636292, |
|
"loss": 2.2046, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.2694706618785858, |
|
"learning_rate": 0.0004973305329402509, |
|
"loss": 2.3489, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.19102543592453003, |
|
"learning_rate": 0.0004972345213243071, |
|
"loss": 2.4592, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.15223553776741028, |
|
"learning_rate": 0.0004971368230707905, |
|
"loss": 2.4645, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.21530242264270782, |
|
"learning_rate": 0.0004970374388462, |
|
"loss": 2.2222, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.15933960676193237, |
|
"learning_rate": 0.0004969363693285363, |
|
"loss": 2.2653, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.2127295583486557, |
|
"learning_rate": 0.0004968336152072971, |
|
"loss": 2.3073, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.18392804265022278, |
|
"learning_rate": 0.0004967291771834727, |
|
"loss": 2.1329, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.15626537799835205, |
|
"learning_rate": 0.0004966230559695406, |
|
"loss": 2.2159, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.19132393598556519, |
|
"learning_rate": 0.0004965152522894615, |
|
"loss": 2.476, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.20226113498210907, |
|
"learning_rate": 0.0004964057668786736, |
|
"loss": 2.3081, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.1876431554555893, |
|
"learning_rate": 0.000496294600484088, |
|
"loss": 2.3597, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.16817514598369598, |
|
"learning_rate": 0.0004961817538640836, |
|
"loss": 2.2757, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.18912410736083984, |
|
"learning_rate": 0.0004960672277885016, |
|
"loss": 2.3976, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.2327001988887787, |
|
"learning_rate": 0.0004959510230386406, |
|
"loss": 2.3661, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.2154412865638733, |
|
"learning_rate": 0.0004958331404072511, |
|
"loss": 2.3553, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.18963538110256195, |
|
"learning_rate": 0.0004957135806985303, |
|
"loss": 2.166, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.19671855866909027, |
|
"learning_rate": 0.0004955923447281162, |
|
"loss": 2.4749, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.20475703477859497, |
|
"learning_rate": 0.0004954694333230824, |
|
"loss": 2.2721, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.17598754167556763, |
|
"learning_rate": 0.0004953448473219323, |
|
"loss": 2.3505, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.21133027970790863, |
|
"learning_rate": 0.0004952185875745934, |
|
"loss": 2.3477, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.2046145498752594, |
|
"learning_rate": 0.0004950906549424119, |
|
"loss": 2.3582, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.19380740821361542, |
|
"learning_rate": 0.0004949610502981457, |
|
"loss": 2.4032, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.17790696024894714, |
|
"learning_rate": 0.0004948297745259598, |
|
"loss": 2.5021, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.15176743268966675, |
|
"learning_rate": 0.0004946968285214194, |
|
"loss": 2.2358, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.20856505632400513, |
|
"learning_rate": 0.0004945622131914843, |
|
"loss": 2.2877, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.2029004991054535, |
|
"learning_rate": 0.0004944259294545019, |
|
"loss": 2.4357, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.15777291357517242, |
|
"learning_rate": 0.0004942879782402022, |
|
"loss": 2.34, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.18712708353996277, |
|
"learning_rate": 0.0004941483604896904, |
|
"loss": 2.3396, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.20823413133621216, |
|
"learning_rate": 0.0004940070771554408, |
|
"loss": 2.3517, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3073880970478058, |
|
"learning_rate": 0.0004938641292012904, |
|
"loss": 2.1421, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.2047368884086609, |
|
"learning_rate": 0.0004937195176024323, |
|
"loss": 2.3541, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.1642436683177948, |
|
"learning_rate": 0.0004935732433454088, |
|
"loss": 2.3114, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.24149803817272186, |
|
"learning_rate": 0.0004934253074281054, |
|
"loss": 2.0895, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.18440380692481995, |
|
"learning_rate": 0.0004932757108597428, |
|
"loss": 2.3473, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.18851065635681152, |
|
"learning_rate": 0.0004931244546608711, |
|
"loss": 2.7386, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.2015257179737091, |
|
"learning_rate": 0.0004929715398633624, |
|
"loss": 2.5105, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.2113213837146759, |
|
"learning_rate": 0.0004928169675104037, |
|
"loss": 2.2766, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.16831834614276886, |
|
"learning_rate": 0.0004926607386564898, |
|
"loss": 2.4742, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.22336164116859436, |
|
"learning_rate": 0.0004925028543674164, |
|
"loss": 2.5234, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.19430772960186005, |
|
"learning_rate": 0.0004923433157202723, |
|
"loss": 2.1902, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1814448982477188, |
|
"learning_rate": 0.0004921821238034326, |
|
"loss": 2.3091, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.2538805305957794, |
|
"learning_rate": 0.000492019279716551, |
|
"loss": 2.3677, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.22240865230560303, |
|
"learning_rate": 0.0004918547845705523, |
|
"loss": 1.6151, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.21643097698688507, |
|
"learning_rate": 0.0004916886394876247, |
|
"loss": 2.3328, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1595735102891922, |
|
"learning_rate": 0.0004915208456012125, |
|
"loss": 2.2963, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.1994553655385971, |
|
"learning_rate": 0.0004913514040560081, |
|
"loss": 2.4716, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.19502036273479462, |
|
"learning_rate": 0.000491180316007944, |
|
"loss": 2.2055, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.33217477798461914, |
|
"learning_rate": 0.0004910075826241858, |
|
"loss": 2.3901, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.17329910397529602, |
|
"learning_rate": 0.0004908332050831229, |
|
"loss": 2.4412, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.19649653136730194, |
|
"learning_rate": 0.0004906571845743616, |
|
"loss": 2.5036, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.20772477984428406, |
|
"learning_rate": 0.0004904795222987164, |
|
"loss": 2.2438, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.17279602587223053, |
|
"learning_rate": 0.000490300219468202, |
|
"loss": 2.2795, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.1734314262866974, |
|
"learning_rate": 0.0004901192773060249, |
|
"loss": 2.286, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.20874248445034027, |
|
"learning_rate": 0.0004899366970465753, |
|
"loss": 2.335, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.17237204313278198, |
|
"learning_rate": 0.0004897524799354184, |
|
"loss": 2.5809, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.19388693571090698, |
|
"learning_rate": 0.0004895666272292862, |
|
"loss": 2.4632, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.17896869778633118, |
|
"learning_rate": 0.0004893791401960683, |
|
"loss": 1.9323, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.2121048867702484, |
|
"learning_rate": 0.0004891900201148043, |
|
"loss": 2.1472, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.20368768274784088, |
|
"learning_rate": 0.000488999268275674, |
|
"loss": 2.3966, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.1953553706407547, |
|
"learning_rate": 0.0004888068859799895, |
|
"loss": 2.1608, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.168823704123497, |
|
"learning_rate": 0.0004886128745401855, |
|
"loss": 2.572, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.213327556848526, |
|
"learning_rate": 0.000488417235279811, |
|
"loss": 2.4827, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.19623854756355286, |
|
"learning_rate": 0.00048821996953352, |
|
"loss": 2.3122, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.15413519740104675, |
|
"learning_rate": 0.00048802107864706217, |
|
"loss": 2.3699, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.1929747462272644, |
|
"learning_rate": 0.00048782056397727425, |
|
"loss": 2.462, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.19839173555374146, |
|
"learning_rate": 0.0004876184268920702, |
|
"loss": 2.2265, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.1684265285730362, |
|
"learning_rate": 0.0004874146687704323, |
|
"loss": 2.0534, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.19233009219169617, |
|
"learning_rate": 0.0004872092910024014, |
|
"loss": 2.2648, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.1821518987417221, |
|
"learning_rate": 0.0004870022949890676, |
|
"loss": 2.1656, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.17291715741157532, |
|
"learning_rate": 0.000486793682142561, |
|
"loss": 2.2383, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.18019358813762665, |
|
"learning_rate": 0.00048658345388604134, |
|
"loss": 2.4353, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.2849899232387543, |
|
"learning_rate": 0.00048637161165368905, |
|
"loss": 2.331, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3656805753707886, |
|
"learning_rate": 0.00048615815689069487, |
|
"loss": 2.3611, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.18143434822559357, |
|
"learning_rate": 0.0004859430910532504, |
|
"loss": 2.4043, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.1801663637161255, |
|
"learning_rate": 0.00048572641560853805, |
|
"loss": 2.2429, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.16966284811496735, |
|
"learning_rate": 0.0004855081320347207, |
|
"loss": 2.3253, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.25287890434265137, |
|
"learning_rate": 0.0004852882418209323, |
|
"loss": 2.2341, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.1766708791255951, |
|
"learning_rate": 0.0004850667464672672, |
|
"loss": 2.6625, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.23353531956672668, |
|
"learning_rate": 0.00048484364748476995, |
|
"loss": 1.9082, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.19202041625976562, |
|
"learning_rate": 0.00048461894639542505, |
|
"loss": 2.5067, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.75723135471344, |
|
"learning_rate": 0.0004843926447321466, |
|
"loss": 2.1948, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.18087565898895264, |
|
"learning_rate": 0.0004841647440387681, |
|
"loss": 2.273, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.20649802684783936, |
|
"learning_rate": 0.0004839352458700314, |
|
"loss": 2.3464, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.19096232950687408, |
|
"learning_rate": 0.0004837041517915762, |
|
"loss": 2.1913, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1938013732433319, |
|
"learning_rate": 0.0004834714633799301, |
|
"loss": 2.1721, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3532910943031311, |
|
"learning_rate": 0.00048323718222249655, |
|
"loss": 2.0894, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1876286268234253, |
|
"learning_rate": 0.00048300130991754534, |
|
"loss": 2.4283, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1776292324066162, |
|
"learning_rate": 0.0004827638480742007, |
|
"loss": 2.2125, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.22727273404598236, |
|
"learning_rate": 0.0004825247983124309, |
|
"loss": 2.2988, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.16823545098304749, |
|
"learning_rate": 0.00048228416226303697, |
|
"loss": 2.1763, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.21675339341163635, |
|
"learning_rate": 0.00048204194156764146, |
|
"loss": 2.4963, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.1901399940252304, |
|
"learning_rate": 0.0004817981378786778, |
|
"loss": 2.2221, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.20318692922592163, |
|
"learning_rate": 0.00048155275285937814, |
|
"loss": 2.601, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.17421816289424896, |
|
"learning_rate": 0.0004813057881837629, |
|
"loss": 2.3034, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.2204848974943161, |
|
"learning_rate": 0.00048105724553662864, |
|
"loss": 2.2784, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.21678051352500916, |
|
"learning_rate": 0.00048080712661353707, |
|
"loss": 2.1565, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.18651427328586578, |
|
"learning_rate": 0.0004805554331208032, |
|
"loss": 2.2581, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2580089569091797, |
|
"learning_rate": 0.00048030216677548367, |
|
"loss": 2.0844, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.20590724050998688, |
|
"learning_rate": 0.00048004732930536525, |
|
"loss": 2.1468, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.21979926526546478, |
|
"learning_rate": 0.00047979092244895306, |
|
"loss": 2.1362, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.20203465223312378, |
|
"learning_rate": 0.00047953294795545845, |
|
"loss": 2.2438, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.19766996800899506, |
|
"learning_rate": 0.0004792734075847871, |
|
"loss": 2.2673, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.18661414086818695, |
|
"learning_rate": 0.0004790123031075275, |
|
"loss": 2.1558, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.20814205706119537, |
|
"learning_rate": 0.00047874963630493804, |
|
"loss": 2.2609, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.20795443654060364, |
|
"learning_rate": 0.00047848540896893577, |
|
"loss": 2.0754, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.20875173807144165, |
|
"learning_rate": 0.00047821962290208326, |
|
"loss": 2.1128, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.18199613690376282, |
|
"learning_rate": 0.00047795227991757715, |
|
"loss": 2.3072, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.18696178495883942, |
|
"learning_rate": 0.00047768338183923527, |
|
"loss": 2.1862, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.1970338076353073, |
|
"learning_rate": 0.0004774129305014842, |
|
"loss": 2.1828, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.19044700264930725, |
|
"learning_rate": 0.00047714092774934705, |
|
"loss": 2.2603, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.20430251955986023, |
|
"learning_rate": 0.00047686737543843063, |
|
"loss": 2.1969, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.31919530034065247, |
|
"learning_rate": 0.0004765922754349128, |
|
"loss": 2.023, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.18964442610740662, |
|
"learning_rate": 0.0004763156296155299, |
|
"loss": 2.3088, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.1777138113975525, |
|
"learning_rate": 0.0004760374398675638, |
|
"loss": 2.2829, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.19124820828437805, |
|
"learning_rate": 0.0004757577080888291, |
|
"loss": 2.332, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.20289361476898193, |
|
"learning_rate": 0.0004754764361876602, |
|
"loss": 2.3203, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.2134077399969101, |
|
"learning_rate": 0.0004751936260828982, |
|
"loss": 2.3119, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.2681266963481903, |
|
"learning_rate": 0.00047490927970387766, |
|
"loss": 2.1415, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.2137921005487442, |
|
"learning_rate": 0.000474623398990414, |
|
"loss": 2.1536, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.2895020842552185, |
|
"learning_rate": 0.0004743359858927895, |
|
"loss": 2.0187, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.193098783493042, |
|
"learning_rate": 0.0004740470423717407, |
|
"loss": 2.0446, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.1691964864730835, |
|
"learning_rate": 0.00047375657039844455, |
|
"loss": 2.2179, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.18233279883861542, |
|
"learning_rate": 0.0004734645719545051, |
|
"loss": 2.2694, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.2980597913265228, |
|
"learning_rate": 0.0004731710490319403, |
|
"loss": 2.0281, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.21693329513072968, |
|
"learning_rate": 0.0004728760036331676, |
|
"loss": 2.2698, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.20274919271469116, |
|
"learning_rate": 0.0004725794377709912, |
|
"loss": 2.4936, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.2515089213848114, |
|
"learning_rate": 0.0004722813534685879, |
|
"loss": 2.1535, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.20583480596542358, |
|
"learning_rate": 0.00047198175275949315, |
|
"loss": 2.1036, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.18232561647891998, |
|
"learning_rate": 0.0004716806376875874, |
|
"loss": 2.3023, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.21218007802963257, |
|
"learning_rate": 0.00047137801030708217, |
|
"loss": 2.3002, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.2054254710674286, |
|
"learning_rate": 0.0004710738726825059, |
|
"loss": 2.3362, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.21346160769462585, |
|
"learning_rate": 0.00047076822688869006, |
|
"loss": 2.0738, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.18373315036296844, |
|
"learning_rate": 0.00047046107501075475, |
|
"loss": 2.2725, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1660277098417282, |
|
"learning_rate": 0.0004701524191440947, |
|
"loss": 2.2974, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.20062261819839478, |
|
"learning_rate": 0.0004698422613943647, |
|
"loss": 2.4897, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.20721417665481567, |
|
"learning_rate": 0.00046953060387746557, |
|
"loss": 2.2635, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.22309726476669312, |
|
"learning_rate": 0.00046921744871952954, |
|
"loss": 2.2008, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.19087284803390503, |
|
"learning_rate": 0.00046890279805690565, |
|
"loss": 2.1818, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.1862783133983612, |
|
"learning_rate": 0.00046858665403614556, |
|
"loss": 2.2324, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.21848712861537933, |
|
"learning_rate": 0.0004682690188139883, |
|
"loss": 2.4289, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.20349426567554474, |
|
"learning_rate": 0.00046794989455734617, |
|
"loss": 2.3544, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.22449104487895966, |
|
"learning_rate": 0.00046762928344328957, |
|
"loss": 2.33, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.20320549607276917, |
|
"learning_rate": 0.0004673071876590322, |
|
"loss": 2.3229, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.33305850625038147, |
|
"learning_rate": 0.00046698360940191643, |
|
"loss": 2.3729, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.20021797716617584, |
|
"learning_rate": 0.00046665855087939777, |
|
"loss": 2.2189, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.18848876655101776, |
|
"learning_rate": 0.0004663320143090304, |
|
"loss": 2.1184, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2134588062763214, |
|
"learning_rate": 0.00046600400191845157, |
|
"loss": 2.3574, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.20047910511493683, |
|
"learning_rate": 0.0004656745159453667, |
|
"loss": 2.4937, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2156955897808075, |
|
"learning_rate": 0.00046534355863753397, |
|
"loss": 2.443, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2098633348941803, |
|
"learning_rate": 0.00046501113225274915, |
|
"loss": 2.5053, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.20685449242591858, |
|
"learning_rate": 0.00046467723905882984, |
|
"loss": 2.3909, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.21232719719409943, |
|
"learning_rate": 0.00046434188133360045, |
|
"loss": 2.2625, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.1747647374868393, |
|
"learning_rate": 0.00046400506136487626, |
|
"loss": 2.0894, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.30385705828666687, |
|
"learning_rate": 0.00046366678145044814, |
|
"loss": 2.3737, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.21601806581020355, |
|
"learning_rate": 0.0004633270438980668, |
|
"loss": 2.2979, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.19758005440235138, |
|
"learning_rate": 0.0004629858510254267, |
|
"loss": 2.346, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.23565934598445892, |
|
"learning_rate": 0.0004626432051601507, |
|
"loss": 2.3861, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.20269787311553955, |
|
"learning_rate": 0.0004622991086397741, |
|
"loss": 2.2194, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.17142094671726227, |
|
"learning_rate": 0.00046195356381172834, |
|
"loss": 2.2701, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.1847238689661026, |
|
"learning_rate": 0.0004616065730333254, |
|
"loss": 2.0997, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.2138655036687851, |
|
"learning_rate": 0.00046125813867174155, |
|
"loss": 2.5526, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.23903928697109222, |
|
"learning_rate": 0.00046090826310400115, |
|
"loss": 2.4783, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.26335886120796204, |
|
"learning_rate": 0.0004605569487169605, |
|
"loss": 2.394, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.2013441026210785, |
|
"learning_rate": 0.0004602041979072916, |
|
"loss": 2.2272, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.17305096983909607, |
|
"learning_rate": 0.0004598500130814657, |
|
"loss": 2.4204, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.1825268715620041, |
|
"learning_rate": 0.00045949439665573686, |
|
"loss": 2.2598, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.21777556836605072, |
|
"learning_rate": 0.00045913735105612577, |
|
"loss": 2.1326, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.23001907765865326, |
|
"learning_rate": 0.00045877887871840264, |
|
"loss": 2.4948, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.20269040763378143, |
|
"learning_rate": 0.0004584189820880712, |
|
"loss": 2.127, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.1827128827571869, |
|
"learning_rate": 0.0004580576636203515, |
|
"loss": 2.2534, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.17216241359710693, |
|
"learning_rate": 0.0004576949257801636, |
|
"loss": 2.4425, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.19745908677577972, |
|
"learning_rate": 0.0004573307710421103, |
|
"loss": 2.3759, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.21618859469890594, |
|
"learning_rate": 0.00045696520189046077, |
|
"loss": 2.2339, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.18233755230903625, |
|
"learning_rate": 0.00045659822081913306, |
|
"loss": 2.2468, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.17987078428268433, |
|
"learning_rate": 0.00045622983033167755, |
|
"loss": 2.2023, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.20248466730117798, |
|
"learning_rate": 0.0004558600329412596, |
|
"loss": 2.1912, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.20506608486175537, |
|
"learning_rate": 0.0004554888311706425, |
|
"loss": 2.3273, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.22549524903297424, |
|
"learning_rate": 0.0004551162275521702, |
|
"loss": 2.1894, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.18408866226673126, |
|
"learning_rate": 0.00045474222462775016, |
|
"loss": 2.3541, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.2189367264509201, |
|
"learning_rate": 0.00045436682494883585, |
|
"loss": 2.119, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.1773710697889328, |
|
"learning_rate": 0.00045399003107640944, |
|
"loss": 2.3584, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.1946442872285843, |
|
"learning_rate": 0.00045361184558096433, |
|
"loss": 2.237, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.2449074685573578, |
|
"learning_rate": 0.00045323227104248763, |
|
"loss": 2.2493, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.1789473295211792, |
|
"learning_rate": 0.00045285131005044245, |
|
"loss": 2.1959, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.20725607872009277, |
|
"learning_rate": 0.00045246896520375035, |
|
"loss": 2.3345, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.21160945296287537, |
|
"learning_rate": 0.0004520852391107734, |
|
"loss": 2.2686, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.20865868031978607, |
|
"learning_rate": 0.0004517001343892969, |
|
"loss": 2.2881, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.23047909140586853, |
|
"learning_rate": 0.00045131365366651085, |
|
"loss": 2.1912, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.2568853795528412, |
|
"learning_rate": 0.0004509257995789925, |
|
"loss": 2.4391, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.17928802967071533, |
|
"learning_rate": 0.0004505365747726882, |
|
"loss": 2.3539, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.20399945974349976, |
|
"learning_rate": 0.00045014598190289536, |
|
"loss": 2.1629, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.20776867866516113, |
|
"learning_rate": 0.0004497540236342443, |
|
"loss": 2.3111, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.2226899415254593, |
|
"learning_rate": 0.00044936070264068017, |
|
"loss": 2.37, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.1808883249759674, |
|
"learning_rate": 0.00044896602160544467, |
|
"loss": 2.0685, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.21628347039222717, |
|
"learning_rate": 0.00044856998322105764, |
|
"loss": 2.2747, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.21809974312782288, |
|
"learning_rate": 0.0004481725901892988, |
|
"loss": 2.4779, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.2099200189113617, |
|
"learning_rate": 0.00044777384522118926, |
|
"loss": 2.1749, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.22972367703914642, |
|
"learning_rate": 0.0004473737510369732, |
|
"loss": 2.4255, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.3171684443950653, |
|
"learning_rate": 0.0004469723103660991, |
|
"loss": 2.2226, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.23672665655612946, |
|
"learning_rate": 0.000446569525947201, |
|
"loss": 2.3501, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.20475780963897705, |
|
"learning_rate": 0.0004461654005280804, |
|
"loss": 2.3695, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.1991017460823059, |
|
"learning_rate": 0.00044575993686568675, |
|
"loss": 2.2487, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.24624770879745483, |
|
"learning_rate": 0.00044535313772609925, |
|
"loss": 2.3519, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.17691905796527863, |
|
"learning_rate": 0.0004449450058845077, |
|
"loss": 2.3415, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.19319698214530945, |
|
"learning_rate": 0.0004445355441251935, |
|
"loss": 2.2697, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.20446337759494781, |
|
"learning_rate": 0.0004441247552415107, |
|
"loss": 2.2208, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.22464382648468018, |
|
"learning_rate": 0.00044371264203586753, |
|
"loss": 2.2629, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.22489066421985626, |
|
"learning_rate": 0.0004432992073197062, |
|
"loss": 2.3359, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.19456712901592255, |
|
"learning_rate": 0.0004428844539134844, |
|
"loss": 2.196, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.22781945765018463, |
|
"learning_rate": 0.00044246838464665606, |
|
"loss": 2.3108, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.20936964452266693, |
|
"learning_rate": 0.00044205100235765194, |
|
"loss": 2.1782, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.1924324333667755, |
|
"learning_rate": 0.0004416323098938602, |
|
"loss": 2.297, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.18653331696987152, |
|
"learning_rate": 0.00044121231011160704, |
|
"loss": 2.2042, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.24487438797950745, |
|
"learning_rate": 0.0004407910058761372, |
|
"loss": 2.2889, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.19584549963474274, |
|
"learning_rate": 0.00044036840006159443, |
|
"loss": 2.3466, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.21697990596294403, |
|
"learning_rate": 0.00043994449555100193, |
|
"loss": 2.2742, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.2454603612422943, |
|
"learning_rate": 0.00043951929523624233, |
|
"loss": 2.4419, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.24156726896762848, |
|
"learning_rate": 0.0004390928020180388, |
|
"loss": 2.1757, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.1978321224451065, |
|
"learning_rate": 0.00043866501880593416, |
|
"loss": 2.0642, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.18749172985553741, |
|
"learning_rate": 0.000438235948518272, |
|
"loss": 2.3689, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.25640445947647095, |
|
"learning_rate": 0.000437805594082176, |
|
"loss": 2.2029, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.21351024508476257, |
|
"learning_rate": 0.0004373739584335308, |
|
"loss": 2.4231, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.1982639878988266, |
|
"learning_rate": 0.0004369410445169608, |
|
"loss": 2.2831, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.31549686193466187, |
|
"learning_rate": 0.00043650685528581155, |
|
"loss": 2.3625, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.26613035798072815, |
|
"learning_rate": 0.00043607139370212814, |
|
"loss": 2.1986, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.20148108899593353, |
|
"learning_rate": 0.0004356346627366361, |
|
"loss": 2.2001, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.20938289165496826, |
|
"learning_rate": 0.0004351966653687206, |
|
"loss": 2.196, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.19232477247714996, |
|
"learning_rate": 0.00043475740458640607, |
|
"loss": 2.2149, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.23008297383785248, |
|
"learning_rate": 0.0004343168833863361, |
|
"loss": 2.2319, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.1995578110218048, |
|
"learning_rate": 0.0004338751047737529, |
|
"loss": 2.3673, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.17232060432434082, |
|
"learning_rate": 0.0004334320717624767, |
|
"loss": 2.1701, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.25711289048194885, |
|
"learning_rate": 0.0004329877873748853, |
|
"loss": 2.1933, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.2659030854701996, |
|
"learning_rate": 0.00043254225464189335, |
|
"loss": 1.9466, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.22041670978069305, |
|
"learning_rate": 0.00043209547660293167, |
|
"loss": 2.0898, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.18253591656684875, |
|
"learning_rate": 0.00043164745630592686, |
|
"loss": 2.2224, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.21765796840190887, |
|
"learning_rate": 0.00043119819680728, |
|
"loss": 2.2998, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.2172078639268875, |
|
"learning_rate": 0.00043074770117184594, |
|
"loss": 2.2644, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.19673798978328705, |
|
"learning_rate": 0.0004302959724729127, |
|
"loss": 2.1841, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.20035311579704285, |
|
"learning_rate": 0.0004298430137921801, |
|
"loss": 2.4498, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.18350964784622192, |
|
"learning_rate": 0.000429388828219739, |
|
"loss": 2.3168, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.18951267004013062, |
|
"learning_rate": 0.00042893341885405007, |
|
"loss": 2.2359, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.19564704596996307, |
|
"learning_rate": 0.0004284767888019229, |
|
"loss": 2.4583, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.1913597732782364, |
|
"learning_rate": 0.00042801894117849425, |
|
"loss": 2.2388, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.22768037021160126, |
|
"learning_rate": 0.00042755987910720765, |
|
"loss": 2.4552, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.20260490477085114, |
|
"learning_rate": 0.00042709960571979123, |
|
"loss": 2.4709, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.2456539422273636, |
|
"learning_rate": 0.00042663812415623685, |
|
"loss": 2.0556, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.20906373858451843, |
|
"learning_rate": 0.00042617543756477867, |
|
"loss": 2.2327, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.30633488297462463, |
|
"learning_rate": 0.0004257115491018714, |
|
"loss": 2.3643, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.2021452635526657, |
|
"learning_rate": 0.00042524646193216906, |
|
"loss": 2.2815, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.20578056573867798, |
|
"learning_rate": 0.0004247801792285032, |
|
"loss": 2.1693, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.24151162803173065, |
|
"learning_rate": 0.0004243127041718614, |
|
"loss": 2.4737, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.2197379320859909, |
|
"learning_rate": 0.00042384403995136534, |
|
"loss": 2.4527, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.24394835531711578, |
|
"learning_rate": 0.00042337418976424936, |
|
"loss": 2.3551, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.2169724851846695, |
|
"learning_rate": 0.0004229031568158383, |
|
"loss": 2.2993, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.193971648812294, |
|
"learning_rate": 0.0004224309443195261, |
|
"loss": 2.3481, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.22565196454524994, |
|
"learning_rate": 0.00042195755549675324, |
|
"loss": 2.4907, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.24645136296749115, |
|
"learning_rate": 0.0004214829935769854, |
|
"loss": 2.2397, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.18104226887226105, |
|
"learning_rate": 0.000421007261797691, |
|
"loss": 2.2246, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.17285558581352234, |
|
"learning_rate": 0.0004205303634043194, |
|
"loss": 2.1925, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.17167840898036957, |
|
"learning_rate": 0.0004200523016502783, |
|
"loss": 2.2859, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.2422027587890625, |
|
"learning_rate": 0.00041957307979691226, |
|
"loss": 2.1167, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.22238920629024506, |
|
"learning_rate": 0.00041909270111347986, |
|
"loss": 2.1963, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.31004032492637634, |
|
"learning_rate": 0.0004186111688771315, |
|
"loss": 2.0878, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.20740267634391785, |
|
"learning_rate": 0.00041812848637288736, |
|
"loss": 2.2541, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.220609650015831, |
|
"learning_rate": 0.00041764465689361454, |
|
"loss": 2.3849, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.24316507577896118, |
|
"learning_rate": 0.000417159683740005, |
|
"loss": 2.2295, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2829028069972992, |
|
"learning_rate": 0.0004166735702205527, |
|
"loss": 2.2568, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.20122820138931274, |
|
"learning_rate": 0.0004161863196515314, |
|
"loss": 2.2641, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.23964501917362213, |
|
"learning_rate": 0.00041569793535697165, |
|
"loss": 2.3745, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.20462176203727722, |
|
"learning_rate": 0.0004152084206686384, |
|
"loss": 2.3948, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.21822762489318848, |
|
"learning_rate": 0.00041471777892600805, |
|
"loss": 2.4378, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.18578322231769562, |
|
"learning_rate": 0.0004142260134762459, |
|
"loss": 2.2125, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.1855502426624298, |
|
"learning_rate": 0.0004137331276741831, |
|
"loss": 2.1584, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2354944944381714, |
|
"learning_rate": 0.00041323912488229394, |
|
"loss": 2.4169, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2282811552286148, |
|
"learning_rate": 0.0004127440084706727, |
|
"loss": 2.1559, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.2994524836540222, |
|
"learning_rate": 0.0004122477818170111, |
|
"loss": 1.9802, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.17210961878299713, |
|
"learning_rate": 0.00041175044830657454, |
|
"loss": 2.1983, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.21259650588035583, |
|
"learning_rate": 0.0004112520113321797, |
|
"loss": 2.184, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1957891881465912, |
|
"learning_rate": 0.00041075247429417095, |
|
"loss": 2.3489, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.202979177236557, |
|
"learning_rate": 0.00041025184060039743, |
|
"loss": 2.3709, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1887211799621582, |
|
"learning_rate": 0.0004097501136661896, |
|
"loss": 2.4586, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.1808994561433792, |
|
"learning_rate": 0.00040924729691433606, |
|
"loss": 2.3032, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.20630981028079987, |
|
"learning_rate": 0.0004087433937750601, |
|
"loss": 2.3186, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.2361355423927307, |
|
"learning_rate": 0.00040823840768599656, |
|
"loss": 2.1686, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.23437006771564484, |
|
"learning_rate": 0.0004077323420921679, |
|
"loss": 2.2247, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.20080877840518951, |
|
"learning_rate": 0.00040722520044596114, |
|
"loss": 2.453, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.22303621470928192, |
|
"learning_rate": 0.0004067169862071041, |
|
"loss": 2.095, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.1971607655286789, |
|
"learning_rate": 0.0004062077028426419, |
|
"loss": 2.2782, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.22378380596637726, |
|
"learning_rate": 0.0004056973538269132, |
|
"loss": 2.2542, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.1935906559228897, |
|
"learning_rate": 0.00040518594264152654, |
|
"loss": 2.1513, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.288876473903656, |
|
"learning_rate": 0.00040467347277533655, |
|
"loss": 2.2316, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.2500283122062683, |
|
"learning_rate": 0.0004041599477244204, |
|
"loss": 2.3276, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.2323485165834427, |
|
"learning_rate": 0.0004036453709920535, |
|
"loss": 2.3284, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.22176632285118103, |
|
"learning_rate": 0.000403129746088686, |
|
"loss": 2.245, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.21016566455364227, |
|
"learning_rate": 0.0004026130765319187, |
|
"loss": 2.2102, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.22619667649269104, |
|
"learning_rate": 0.00040209536584647875, |
|
"loss": 2.4151, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.2051662653684616, |
|
"learning_rate": 0.00040157661756419646, |
|
"loss": 2.2177, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.20449289679527283, |
|
"learning_rate": 0.00040105683522398004, |
|
"loss": 2.219, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.20390522480010986, |
|
"learning_rate": 0.00040053602237179246, |
|
"loss": 2.1071, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.20242749154567719, |
|
"learning_rate": 0.00040001418256062674, |
|
"loss": 2.0979, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.22173428535461426, |
|
"learning_rate": 0.0003994913193504817, |
|
"loss": 2.3197, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.22555288672447205, |
|
"learning_rate": 0.000398967436308338, |
|
"loss": 2.4671, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2446102499961853, |
|
"learning_rate": 0.0003984425370081335, |
|
"loss": 2.3669, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.20918509364128113, |
|
"learning_rate": 0.000397916625030739, |
|
"loss": 2.4581, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2591955363750458, |
|
"learning_rate": 0.00039738970396393386, |
|
"loss": 2.2427, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.21759961545467377, |
|
"learning_rate": 0.0003968617774023814, |
|
"loss": 2.3986, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.29073455929756165, |
|
"learning_rate": 0.0003963328489476045, |
|
"loss": 2.4886, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.1800486147403717, |
|
"learning_rate": 0.00039580292220796077, |
|
"loss": 2.4326, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.23151837289333344, |
|
"learning_rate": 0.0003952720007986185, |
|
"loss": 2.3614, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.24493078887462616, |
|
"learning_rate": 0.0003947400883415313, |
|
"loss": 2.4108, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.18288132548332214, |
|
"learning_rate": 0.00039420718846541384, |
|
"loss": 2.3391, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.22715741395950317, |
|
"learning_rate": 0.000393673304805717, |
|
"loss": 2.1612, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.19885744154453278, |
|
"learning_rate": 0.0003931384410046031, |
|
"loss": 2.2497, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.20505961775779724, |
|
"learning_rate": 0.0003926026007109207, |
|
"loss": 2.368, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.20146214962005615, |
|
"learning_rate": 0.00039206578758018047, |
|
"loss": 2.457, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2406214475631714, |
|
"learning_rate": 0.0003915280052745295, |
|
"loss": 2.2507, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.219930961728096, |
|
"learning_rate": 0.0003909892574627266, |
|
"loss": 2.4327, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.24521400034427643, |
|
"learning_rate": 0.0003904495478201174, |
|
"loss": 1.8198, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.20486341416835785, |
|
"learning_rate": 0.00038990888002860913, |
|
"loss": 2.1002, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.21982859075069427, |
|
"learning_rate": 0.0003893672577766453, |
|
"loss": 2.2058, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.21171775460243225, |
|
"learning_rate": 0.0003888246847591811, |
|
"loss": 2.3034, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.23152461647987366, |
|
"learning_rate": 0.0003882811646776577, |
|
"loss": 2.2811, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2419670820236206, |
|
"learning_rate": 0.0003877367012399772, |
|
"loss": 2.3064, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2318524569272995, |
|
"learning_rate": 0.00038719129816047705, |
|
"loss": 2.3313, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.26805251836776733, |
|
"learning_rate": 0.0003866449591599053, |
|
"loss": 2.3949, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.20099863409996033, |
|
"learning_rate": 0.00038609768796539474, |
|
"loss": 2.4194, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2393646538257599, |
|
"learning_rate": 0.00038554948831043755, |
|
"loss": 1.9689, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2404375821352005, |
|
"learning_rate": 0.0003850003639348598, |
|
"loss": 2.1439, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.22678737342357635, |
|
"learning_rate": 0.0003844503185847964, |
|
"loss": 2.2073, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2190311998128891, |
|
"learning_rate": 0.00038389935601266454, |
|
"loss": 2.3895, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.36514168977737427, |
|
"learning_rate": 0.0003833474799771394, |
|
"loss": 2.1925, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.19080831110477448, |
|
"learning_rate": 0.00038279469424312723, |
|
"loss": 2.2825, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.19559559226036072, |
|
"learning_rate": 0.0003822410025817406, |
|
"loss": 1.9923, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.204594224691391, |
|
"learning_rate": 0.0003816864087702723, |
|
"loss": 2.2341, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2163582295179367, |
|
"learning_rate": 0.0003811309165921695, |
|
"loss": 2.3263, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.22616557776927948, |
|
"learning_rate": 0.00038057452983700793, |
|
"loss": 2.4578, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.21181915700435638, |
|
"learning_rate": 0.00038001725230046645, |
|
"loss": 2.2881, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.21244682371616364, |
|
"learning_rate": 0.00037945908778430073, |
|
"loss": 2.3482, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.22712396085262299, |
|
"learning_rate": 0.00037890004009631727, |
|
"loss": 2.467, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.20208491384983063, |
|
"learning_rate": 0.00037834011305034774, |
|
"loss": 2.4241, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.23634742200374603, |
|
"learning_rate": 0.00037777931046622273, |
|
"loss": 2.1896, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.20562510192394257, |
|
"learning_rate": 0.0003772176361697458, |
|
"loss": 2.3983, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.19427534937858582, |
|
"learning_rate": 0.0003766550939926674, |
|
"loss": 2.0804, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.22697383165359497, |
|
"learning_rate": 0.00037609168777265843, |
|
"loss": 2.3416, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.2205856442451477, |
|
"learning_rate": 0.0003755274213532847, |
|
"loss": 2.2662, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.24093294143676758, |
|
"learning_rate": 0.0003749622985839799, |
|
"loss": 2.1266, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.2476285696029663, |
|
"learning_rate": 0.0003743963233200201, |
|
"loss": 2.2735, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.23264004290103912, |
|
"learning_rate": 0.00037382949942249696, |
|
"loss": 2.2642, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.20969323813915253, |
|
"learning_rate": 0.00037326183075829126, |
|
"loss": 2.3931, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.23997239768505096, |
|
"learning_rate": 0.0003726933212000474, |
|
"loss": 2.4217, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.25848066806793213, |
|
"learning_rate": 0.0003721239746261458, |
|
"loss": 2.3721, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.1921965628862381, |
|
"learning_rate": 0.0003715537949206773, |
|
"loss": 2.2568, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.19738563895225525, |
|
"learning_rate": 0.0003709827859734163, |
|
"loss": 2.245, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.23472926020622253, |
|
"learning_rate": 0.0003704109516797943, |
|
"loss": 2.2329, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.19815848767757416, |
|
"learning_rate": 0.00036983829594087336, |
|
"loss": 2.5266, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.22582224011421204, |
|
"learning_rate": 0.00036926482266331947, |
|
"loss": 2.2198, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.21971648931503296, |
|
"learning_rate": 0.0003686905357593758, |
|
"loss": 2.2141, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.20423489809036255, |
|
"learning_rate": 0.00036811543914683605, |
|
"loss": 2.3125, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.19768038392066956, |
|
"learning_rate": 0.0003675395367490179, |
|
"loss": 2.364, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.2276119887828827, |
|
"learning_rate": 0.00036696283249473604, |
|
"loss": 2.2745, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.2061648964881897, |
|
"learning_rate": 0.0003663853303182756, |
|
"loss": 2.3182, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.22242146730422974, |
|
"learning_rate": 0.00036580703415936474, |
|
"loss": 2.2696, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.2070762813091278, |
|
"learning_rate": 0.0003652279479631486, |
|
"loss": 2.157, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.250929594039917, |
|
"learning_rate": 0.00036464807568016176, |
|
"loss": 2.1939, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.26398998498916626, |
|
"learning_rate": 0.00036406742126630166, |
|
"loss": 2.6459, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.22033217549324036, |
|
"learning_rate": 0.00036348598868280114, |
|
"loss": 2.5577, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.25079113245010376, |
|
"learning_rate": 0.00036290378189620195, |
|
"loss": 2.227, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.20717765390872955, |
|
"learning_rate": 0.00036232080487832747, |
|
"loss": 2.1095, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.19811852276325226, |
|
"learning_rate": 0.0003617370616062555, |
|
"loss": 2.1457, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.20729579031467438, |
|
"learning_rate": 0.00036115255606229154, |
|
"loss": 2.3231, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.20803453028202057, |
|
"learning_rate": 0.00036056729223394083, |
|
"loss": 2.3809, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.387298583984375, |
|
"learning_rate": 0.00035998127411388187, |
|
"loss": 2.2312, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.2171965390443802, |
|
"learning_rate": 0.0003593945056999391, |
|
"loss": 2.2823, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.19715160131454468, |
|
"learning_rate": 0.00035880699099505536, |
|
"loss": 2.5001, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.2282709926366806, |
|
"learning_rate": 0.00035821873400726463, |
|
"loss": 2.4297, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.20915524661540985, |
|
"learning_rate": 0.0003576297387496648, |
|
"loss": 2.445, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.28219184279441833, |
|
"learning_rate": 0.0003570400092403903, |
|
"loss": 2.2412, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.21221624314785004, |
|
"learning_rate": 0.00035644954950258444, |
|
"loss": 2.2826, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.22089391946792603, |
|
"learning_rate": 0.00035585836356437264, |
|
"loss": 2.1981, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.2844483256340027, |
|
"learning_rate": 0.0003552664554588338, |
|
"loss": 2.3541, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.2894970774650574, |
|
"learning_rate": 0.00035467382922397395, |
|
"loss": 1.8961, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.24491670727729797, |
|
"learning_rate": 0.00035408048890269805, |
|
"loss": 2.2435, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.2114434689283371, |
|
"learning_rate": 0.00035348643854278257, |
|
"loss": 2.2917, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.24633482098579407, |
|
"learning_rate": 0.0003528916821968479, |
|
"loss": 2.363, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.19707679748535156, |
|
"learning_rate": 0.0003522962239223306, |
|
"loss": 2.268, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.1920863687992096, |
|
"learning_rate": 0.00035170006778145583, |
|
"loss": 2.3193, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.19514207541942596, |
|
"learning_rate": 0.0003511032178412098, |
|
"loss": 2.2747, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.23632396757602692, |
|
"learning_rate": 0.00035050567817331154, |
|
"loss": 2.4162, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.20080457627773285, |
|
"learning_rate": 0.0003499074528541855, |
|
"loss": 2.2029, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.20242765545845032, |
|
"learning_rate": 0.00034930854596493374, |
|
"loss": 2.286, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.24630364775657654, |
|
"learning_rate": 0.00034870896159130786, |
|
"loss": 2.3279, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.32333359122276306, |
|
"learning_rate": 0.0003481087038236815, |
|
"loss": 1.89, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.22153715789318085, |
|
"learning_rate": 0.0003475077767570219, |
|
"loss": 2.2267, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.20011873543262482, |
|
"learning_rate": 0.0003469061844908626, |
|
"loss": 2.5615, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.20656999945640564, |
|
"learning_rate": 0.00034630393112927477, |
|
"loss": 2.1442, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.2093532532453537, |
|
"learning_rate": 0.00034570102078083985, |
|
"loss": 2.2969, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2096518725156784, |
|
"learning_rate": 0.0003450974575586213, |
|
"loss": 2.2671, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.28272372484207153, |
|
"learning_rate": 0.0003444932455801362, |
|
"loss": 2.2979, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.19493138790130615, |
|
"learning_rate": 0.0003438883889673278, |
|
"loss": 2.3548, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.21109020709991455, |
|
"learning_rate": 0.0003432828918465367, |
|
"loss": 2.3726, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.22046278417110443, |
|
"learning_rate": 0.00034267675834847346, |
|
"loss": 2.176, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2979690432548523, |
|
"learning_rate": 0.0003420699926081897, |
|
"loss": 2.0627, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2563689649105072, |
|
"learning_rate": 0.00034146259876505013, |
|
"loss": 2.3314, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.19333158433437347, |
|
"learning_rate": 0.00034085458096270484, |
|
"loss": 2.3283, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2114039808511734, |
|
"learning_rate": 0.00034024594334906014, |
|
"loss": 2.2269, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.22176076471805573, |
|
"learning_rate": 0.00033963669007625085, |
|
"loss": 2.225, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2213573306798935, |
|
"learning_rate": 0.0003390268253006119, |
|
"loss": 2.2757, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2310647964477539, |
|
"learning_rate": 0.00033841635318264987, |
|
"loss": 2.2503, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.22906078398227692, |
|
"learning_rate": 0.00033780527788701445, |
|
"loss": 2.4459, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.21090683341026306, |
|
"learning_rate": 0.0003371936035824705, |
|
"loss": 2.3689, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.28067609667778015, |
|
"learning_rate": 0.00033658133444186935, |
|
"loss": 2.2164, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.24392075836658478, |
|
"learning_rate": 0.0003359684746421199, |
|
"loss": 2.1782, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.22776876389980316, |
|
"learning_rate": 0.0003353550283641608, |
|
"loss": 2.4948, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.22832733392715454, |
|
"learning_rate": 0.0003347409997929318, |
|
"loss": 2.3384, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.23752398788928986, |
|
"learning_rate": 0.0003341263931173449, |
|
"loss": 2.0114, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.20741015672683716, |
|
"learning_rate": 0.00033351121253025597, |
|
"loss": 2.3307, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.23571203649044037, |
|
"learning_rate": 0.000332895462228436, |
|
"loss": 2.433, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.21684762835502625, |
|
"learning_rate": 0.00033227914641254273, |
|
"loss": 2.6377, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.3023325502872467, |
|
"learning_rate": 0.00033166226928709185, |
|
"loss": 2.2254, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.24143949151039124, |
|
"learning_rate": 0.0003310448350604283, |
|
"loss": 2.3094, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.20807060599327087, |
|
"learning_rate": 0.0003304268479446974, |
|
"loss": 2.4342, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.21055589616298676, |
|
"learning_rate": 0.0003298083121558165, |
|
"loss": 2.4467, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.22990992665290833, |
|
"learning_rate": 0.000329189231913446, |
|
"loss": 2.512, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.20713160932064056, |
|
"learning_rate": 0.0003285696114409603, |
|
"loss": 2.2196, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.21360722184181213, |
|
"learning_rate": 0.0003279494549654197, |
|
"loss": 2.1737, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.19622068107128143, |
|
"learning_rate": 0.0003273287667175407, |
|
"loss": 2.3779, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2596871554851532, |
|
"learning_rate": 0.0003267075509316678, |
|
"loss": 2.2926, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.21403126418590546, |
|
"learning_rate": 0.00032608581184574427, |
|
"loss": 2.2394, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.1995462030172348, |
|
"learning_rate": 0.0003254635537012834, |
|
"loss": 2.3674, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.19879235327243805, |
|
"learning_rate": 0.00032484078074333957, |
|
"loss": 2.2684, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2155233919620514, |
|
"learning_rate": 0.00032421749722047894, |
|
"loss": 2.1418, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2114148586988449, |
|
"learning_rate": 0.00032359370738475115, |
|
"loss": 2.3007, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.24906907975673676, |
|
"learning_rate": 0.00032296941549165957, |
|
"loss": 2.07, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.21433790028095245, |
|
"learning_rate": 0.00032234462580013303, |
|
"loss": 2.3135, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.21331354975700378, |
|
"learning_rate": 0.0003217193425724957, |
|
"loss": 2.2919, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.21881960332393646, |
|
"learning_rate": 0.00032109357007443926, |
|
"loss": 2.2984, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.24045515060424805, |
|
"learning_rate": 0.0003204673125749929, |
|
"loss": 2.315, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.19170185923576355, |
|
"learning_rate": 0.0003198405743464946, |
|
"loss": 2.1179, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.21934069693088531, |
|
"learning_rate": 0.0003192133596645619, |
|
"loss": 2.347, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.20186646282672882, |
|
"learning_rate": 0.0003185856728080626, |
|
"loss": 2.3665, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.20092110335826874, |
|
"learning_rate": 0.00031795751805908576, |
|
"loss": 2.2575, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.19863560795783997, |
|
"learning_rate": 0.0003173288997029124, |
|
"loss": 2.4422, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.2436000406742096, |
|
"learning_rate": 0.00031669982202798625, |
|
"loss": 2.4637, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.23264776170253754, |
|
"learning_rate": 0.0003160702893258846, |
|
"loss": 2.509, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.23909597098827362, |
|
"learning_rate": 0.00031544030589128886, |
|
"loss": 2.254, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.21964912116527557, |
|
"learning_rate": 0.0003148098760219553, |
|
"loss": 2.3039, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.1836385279893875, |
|
"learning_rate": 0.00031417900401868603, |
|
"loss": 2.0602, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.22736455500125885, |
|
"learning_rate": 0.00031354769418529905, |
|
"loss": 2.4169, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.18662111461162567, |
|
"learning_rate": 0.0003129159508285995, |
|
"loss": 2.1888, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.20640943944454193, |
|
"learning_rate": 0.00031228377825834975, |
|
"loss": 2.2545, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.1896309107542038, |
|
"learning_rate": 0.0003116511807872406, |
|
"loss": 2.2505, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.20263446867465973, |
|
"learning_rate": 0.0003110181627308615, |
|
"loss": 2.1097, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.20662613213062286, |
|
"learning_rate": 0.00031038472840767085, |
|
"loss": 2.3434, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.24093309044837952, |
|
"learning_rate": 0.000309750882138967, |
|
"loss": 2.2969, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.1911349594593048, |
|
"learning_rate": 0.00030911662824885866, |
|
"loss": 2.4546, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.2091955691576004, |
|
"learning_rate": 0.00030848197106423525, |
|
"loss": 2.2451, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.20242448151111603, |
|
"learning_rate": 0.0003078469149147376, |
|
"loss": 2.1631, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.21048593521118164, |
|
"learning_rate": 0.00030721146413272807, |
|
"loss": 1.9984, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.18805581331253052, |
|
"learning_rate": 0.00030657562305326134, |
|
"loss": 2.4625, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.19612343609333038, |
|
"learning_rate": 0.0003059393960140547, |
|
"loss": 2.1259, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3192237913608551, |
|
"learning_rate": 0.00030530278735545847, |
|
"loss": 2.3196, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.20382805168628693, |
|
"learning_rate": 0.00030466580142042636, |
|
"loss": 2.3005, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.20354492962360382, |
|
"learning_rate": 0.00030402844255448587, |
|
"loss": 1.9433, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.18796123564243317, |
|
"learning_rate": 0.0003033907151057086, |
|
"loss": 2.1679, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.22691605985164642, |
|
"learning_rate": 0.0003027526234246807, |
|
"loss": 2.184, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.19387076795101166, |
|
"learning_rate": 0.00030211417186447304, |
|
"loss": 2.3213, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.23006758093833923, |
|
"learning_rate": 0.0003014753647806117, |
|
"loss": 2.2843, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.2078760713338852, |
|
"learning_rate": 0.0003008362065310481, |
|
"loss": 2.0728, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2124081254005432, |
|
"learning_rate": 0.0003001967014761292, |
|
"loss": 2.1527, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.21508193016052246, |
|
"learning_rate": 0.0002995568539785681, |
|
"loss": 2.3249, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.22573944926261902, |
|
"learning_rate": 0.0002989166684034139, |
|
"loss": 2.5555, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.27325788140296936, |
|
"learning_rate": 0.000298276149118022, |
|
"loss": 2.1355, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2094535082578659, |
|
"learning_rate": 0.0002976353004920245, |
|
"loss": 2.2193, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.1797902137041092, |
|
"learning_rate": 0.0002969941268973003, |
|
"loss": 1.8739, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2198173552751541, |
|
"learning_rate": 0.0002963526327079451, |
|
"loss": 2.4586, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.23765875399112701, |
|
"learning_rate": 0.0002957108223002419, |
|
"loss": 2.2355, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2252362072467804, |
|
"learning_rate": 0.0002950687000526307, |
|
"loss": 2.3512, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2460881769657135, |
|
"learning_rate": 0.0002944262703456789, |
|
"loss": 2.5086, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.18150685727596283, |
|
"learning_rate": 0.0002937835375620518, |
|
"loss": 2.1693, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.21276025474071503, |
|
"learning_rate": 0.00029314050608648175, |
|
"loss": 2.2578, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.21988217532634735, |
|
"learning_rate": 0.00029249718030573905, |
|
"loss": 2.2901, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.22700655460357666, |
|
"learning_rate": 0.00029185356460860185, |
|
"loss": 2.4618, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.24527020752429962, |
|
"learning_rate": 0.00029120966338582583, |
|
"loss": 2.2635, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.21412739157676697, |
|
"learning_rate": 0.00029056548103011473, |
|
"loss": 2.026, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.22026726603507996, |
|
"learning_rate": 0.0002899210219360902, |
|
"loss": 2.2242, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.23526617884635925, |
|
"learning_rate": 0.00028927629050026164, |
|
"loss": 2.466, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.1999528706073761, |
|
"learning_rate": 0.00028863129112099663, |
|
"loss": 2.1298, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.19018436968326569, |
|
"learning_rate": 0.0002879860281984903, |
|
"loss": 2.1991, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.23844487965106964, |
|
"learning_rate": 0.000287340506134736, |
|
"loss": 2.4711, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.22726939618587494, |
|
"learning_rate": 0.00028669472933349485, |
|
"loss": 2.3945, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.24586647748947144, |
|
"learning_rate": 0.00028604870220026566, |
|
"loss": 2.41, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.2375824749469757, |
|
"learning_rate": 0.0002854024291422553, |
|
"loss": 2.0514, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2294244021177292, |
|
"learning_rate": 0.00028475591456834795, |
|
"loss": 2.2207, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.252311646938324, |
|
"learning_rate": 0.00028410916288907587, |
|
"loss": 2.3079, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2053811103105545, |
|
"learning_rate": 0.0002834621785165883, |
|
"loss": 2.2646, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.20166157186031342, |
|
"learning_rate": 0.0002828149658646225, |
|
"loss": 2.348, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.20390185713768005, |
|
"learning_rate": 0.0002821675293484726, |
|
"loss": 2.1899, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.23896300792694092, |
|
"learning_rate": 0.0002815198733849602, |
|
"loss": 2.2631, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2838144302368164, |
|
"learning_rate": 0.00028087200239240403, |
|
"loss": 2.0516, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2110898345708847, |
|
"learning_rate": 0.0002802239207905894, |
|
"loss": 2.3501, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.18373216688632965, |
|
"learning_rate": 0.00027957563300073864, |
|
"loss": 2.0434, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2768537998199463, |
|
"learning_rate": 0.0002789271434454807, |
|
"loss": 2.312, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.21091219782829285, |
|
"learning_rate": 0.0002782784565488211, |
|
"loss": 2.2531, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.19119109213352203, |
|
"learning_rate": 0.0002776295767361113, |
|
"loss": 2.3189, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2107253223657608, |
|
"learning_rate": 0.00027698050843401903, |
|
"loss": 2.3991, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.20837560296058655, |
|
"learning_rate": 0.00027633125607049786, |
|
"loss": 2.2885, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2551201283931732, |
|
"learning_rate": 0.0002756818240747571, |
|
"loss": 2.3301, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.24259725213050842, |
|
"learning_rate": 0.0002750322168772316, |
|
"loss": 2.1009, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.21995201706886292, |
|
"learning_rate": 0.000274382438909551, |
|
"loss": 2.2959, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.1952590048313141, |
|
"learning_rate": 0.0002737324946045104, |
|
"loss": 2.2697, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2953265309333801, |
|
"learning_rate": 0.0002730823883960395, |
|
"loss": 2.1697, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.22461754083633423, |
|
"learning_rate": 0.00027243212471917246, |
|
"loss": 2.2691, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.21608883142471313, |
|
"learning_rate": 0.0002717817080100177, |
|
"loss": 2.3351, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.19717416167259216, |
|
"learning_rate": 0.0002711311427057278, |
|
"loss": 2.2855, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.212899848818779, |
|
"learning_rate": 0.0002704804332444688, |
|
"loss": 2.2025, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2357543259859085, |
|
"learning_rate": 0.0002698295840653903, |
|
"loss": 2.3831, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.2233133614063263, |
|
"learning_rate": 0.0002691785996085952, |
|
"loss": 2.3989, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.22534696757793427, |
|
"learning_rate": 0.0002685274843151089, |
|
"loss": 2.3525, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.21673683822155, |
|
"learning_rate": 0.00026787624262684977, |
|
"loss": 2.3969, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.24868395924568176, |
|
"learning_rate": 0.0002672248789865981, |
|
"loss": 2.369, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.2035851776599884, |
|
"learning_rate": 0.00026657339783796655, |
|
"loss": 2.2208, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.2544965147972107, |
|
"learning_rate": 0.000265921803625369, |
|
"loss": 2.5607, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.17845949530601501, |
|
"learning_rate": 0.00026527010079399084, |
|
"loss": 2.3066, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.22786450386047363, |
|
"learning_rate": 0.00026461829378975846, |
|
"loss": 2.309, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.2404799461364746, |
|
"learning_rate": 0.00026396638705930895, |
|
"loss": 2.5578, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.19382694363594055, |
|
"learning_rate": 0.0002633143850499598, |
|
"loss": 2.3082, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.19829370081424713, |
|
"learning_rate": 0.0002626622922096782, |
|
"loss": 2.4811, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.23021139204502106, |
|
"learning_rate": 0.0002620101129870513, |
|
"loss": 2.2642, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.21615907549858093, |
|
"learning_rate": 0.00026135785183125537, |
|
"loss": 2.2167, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.23788948357105255, |
|
"learning_rate": 0.00026070551319202573, |
|
"loss": 2.3856, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.1953219771385193, |
|
"learning_rate": 0.00026005310151962636, |
|
"loss": 2.2164, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.2500264644622803, |
|
"learning_rate": 0.0002594006212648192, |
|
"loss": 2.4068, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.203148752450943, |
|
"learning_rate": 0.00025874807687883424, |
|
"loss": 2.3142, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.21646182239055634, |
|
"learning_rate": 0.000258095472813339, |
|
"loss": 2.0725, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.19882526993751526, |
|
"learning_rate": 0.00025744281352040814, |
|
"loss": 2.2717, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.22494642436504364, |
|
"learning_rate": 0.00025679010345249306, |
|
"loss": 2.1359, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.22282272577285767, |
|
"learning_rate": 0.0002561373470623914, |
|
"loss": 2.2144, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.20449025928974152, |
|
"learning_rate": 0.000255484548803217, |
|
"loss": 2.1777, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.23310497403144836, |
|
"learning_rate": 0.0002548317131283694, |
|
"loss": 2.1825, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.20909251272678375, |
|
"learning_rate": 0.0002541788444915031, |
|
"loss": 2.22, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.22772087156772614, |
|
"learning_rate": 0.0002535259473464977, |
|
"loss": 2.392, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.26885297894477844, |
|
"learning_rate": 0.00025287302614742714, |
|
"loss": 1.9405, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.2358844131231308, |
|
"learning_rate": 0.00025222008534852956, |
|
"loss": 2.2033, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.20965200662612915, |
|
"learning_rate": 0.00025156712940417684, |
|
"loss": 2.3244, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.20102304220199585, |
|
"learning_rate": 0.0002509141627688441, |
|
"loss": 2.258, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.23354652523994446, |
|
"learning_rate": 0.00025026118989707934, |
|
"loss": 2.2546, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.21666738390922546, |
|
"learning_rate": 0.0002496082152434732, |
|
"loss": 1.8389, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.2279592752456665, |
|
"learning_rate": 0.00024895524326262855, |
|
"loss": 2.2966, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.2545997202396393, |
|
"learning_rate": 0.0002483022784091298, |
|
"loss": 2.3153, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.22384227812290192, |
|
"learning_rate": 0.000247649325137513, |
|
"loss": 2.1932, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.22192451357841492, |
|
"learning_rate": 0.0002469963879022349, |
|
"loss": 2.3922, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.22841066122055054, |
|
"learning_rate": 0.00024634347115764317, |
|
"loss": 2.3009, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.19970761239528656, |
|
"learning_rate": 0.00024569057935794537, |
|
"loss": 2.0438, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.21427589654922485, |
|
"learning_rate": 0.0002450377169571792, |
|
"loss": 1.9967, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.23419593274593353, |
|
"learning_rate": 0.0002443848884091816, |
|
"loss": 2.3208, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.1909918338060379, |
|
"learning_rate": 0.0002437320981675585, |
|
"loss": 2.3367, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.20571142435073853, |
|
"learning_rate": 0.0002430793506856548, |
|
"loss": 2.1166, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.22468572854995728, |
|
"learning_rate": 0.00024242665041652333, |
|
"loss": 2.1951, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2436290830373764, |
|
"learning_rate": 0.00024177400181289518, |
|
"loss": 2.2756, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.24295543134212494, |
|
"learning_rate": 0.0002411214093271486, |
|
"loss": 2.161, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2191152274608612, |
|
"learning_rate": 0.00024046887741127943, |
|
"loss": 2.4584, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.23705042898654938, |
|
"learning_rate": 0.00023981641051686986, |
|
"loss": 2.2818, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.20371532440185547, |
|
"learning_rate": 0.00023916401309505893, |
|
"loss": 2.4664, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2301088273525238, |
|
"learning_rate": 0.00023851168959651147, |
|
"loss": 2.3175, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.2044006735086441, |
|
"learning_rate": 0.000237859444471388, |
|
"loss": 2.2875, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.24112215638160706, |
|
"learning_rate": 0.0002372072821693144, |
|
"loss": 2.4344, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.23517994582653046, |
|
"learning_rate": 0.00023655520713935176, |
|
"loss": 2.2492, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.2575826346874237, |
|
"learning_rate": 0.0002359032238299655, |
|
"loss": 2.1888, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.22532737255096436, |
|
"learning_rate": 0.00023525133668899536, |
|
"loss": 2.3007, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.23337632417678833, |
|
"learning_rate": 0.0002345995501636252, |
|
"loss": 2.2881, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.21395531296730042, |
|
"learning_rate": 0.0002339478687003523, |
|
"loss": 2.2067, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.2543134093284607, |
|
"learning_rate": 0.00023329629674495735, |
|
"loss": 2.4062, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.22206565737724304, |
|
"learning_rate": 0.00023264483874247383, |
|
"loss": 2.1208, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.20878835022449493, |
|
"learning_rate": 0.00023199349913715796, |
|
"loss": 2.2897, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.2773876190185547, |
|
"learning_rate": 0.000231342282372458, |
|
"loss": 2.1967, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.22530554234981537, |
|
"learning_rate": 0.00023069119289098467, |
|
"loss": 2.0708, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.20695586502552032, |
|
"learning_rate": 0.00023004023513447988, |
|
"loss": 2.2647, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.2549115717411041, |
|
"learning_rate": 0.0002293894135437871, |
|
"loss": 2.3193, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.2138691544532776, |
|
"learning_rate": 0.00022873873255882104, |
|
"loss": 2.4966, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.24796932935714722, |
|
"learning_rate": 0.00022808819661853682, |
|
"loss": 2.3338, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.27691391110420227, |
|
"learning_rate": 0.00022743781016090048, |
|
"loss": 2.3752, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.22704118490219116, |
|
"learning_rate": 0.00022678757762285793, |
|
"loss": 2.4301, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.21799629926681519, |
|
"learning_rate": 0.00022613750344030532, |
|
"loss": 2.5869, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.20468707382678986, |
|
"learning_rate": 0.00022548759204805817, |
|
"loss": 2.3938, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.23200130462646484, |
|
"learning_rate": 0.00022483784787982188, |
|
"loss": 2.0689, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.22721296548843384, |
|
"learning_rate": 0.00022418827536816068, |
|
"loss": 2.2533, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.3599529564380646, |
|
"learning_rate": 0.0002235388789444679, |
|
"loss": 2.5117, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.22146733105182648, |
|
"learning_rate": 0.00022288966303893548, |
|
"loss": 2.115, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.20554299652576447, |
|
"learning_rate": 0.00022224063208052408, |
|
"loss": 2.2785, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.2130589336156845, |
|
"learning_rate": 0.0002215917904969326, |
|
"loss": 2.1105, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.21884912252426147, |
|
"learning_rate": 0.00022094314271456779, |
|
"loss": 2.397, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.2159346342086792, |
|
"learning_rate": 0.00022029469315851458, |
|
"loss": 2.1086, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.21349260210990906, |
|
"learning_rate": 0.00021964644625250526, |
|
"loss": 2.2467, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.19596756994724274, |
|
"learning_rate": 0.0002189984064188901, |
|
"loss": 2.199, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.2176314741373062, |
|
"learning_rate": 0.0002183505780786063, |
|
"loss": 2.2719, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.20054878294467926, |
|
"learning_rate": 0.00021770296565114846, |
|
"loss": 2.2418, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.21248690783977509, |
|
"learning_rate": 0.00021705557355453808, |
|
"loss": 2.1406, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.23126669228076935, |
|
"learning_rate": 0.0002164084062052938, |
|
"loss": 2.3417, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.24822832643985748, |
|
"learning_rate": 0.00021576146801840072, |
|
"loss": 2.2398, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.19908185303211212, |
|
"learning_rate": 0.0002151147634072809, |
|
"loss": 2.4964, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2246454805135727, |
|
"learning_rate": 0.00021446829678376273, |
|
"loss": 2.2703, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.23698629438877106, |
|
"learning_rate": 0.00021382207255805097, |
|
"loss": 2.2822, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2192452996969223, |
|
"learning_rate": 0.00021317609513869717, |
|
"loss": 2.1605, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.22096799314022064, |
|
"learning_rate": 0.00021253036893256863, |
|
"loss": 2.4929, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.23533667623996735, |
|
"learning_rate": 0.00021188489834481926, |
|
"loss": 2.5708, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2246759682893753, |
|
"learning_rate": 0.00021123968777885877, |
|
"loss": 2.3319, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2108616828918457, |
|
"learning_rate": 0.0002105947416363235, |
|
"loss": 2.306, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2039867490530014, |
|
"learning_rate": 0.00020995006431704532, |
|
"loss": 2.18, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2507101893424988, |
|
"learning_rate": 0.00020930566021902275, |
|
"loss": 2.4027, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.20139947533607483, |
|
"learning_rate": 0.0002086615337383899, |
|
"loss": 2.2227, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2274298071861267, |
|
"learning_rate": 0.00020801768926938726, |
|
"loss": 2.3289, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.2205355018377304, |
|
"learning_rate": 0.00020737413120433128, |
|
"loss": 2.1413, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.21190853416919708, |
|
"learning_rate": 0.00020673086393358471, |
|
"loss": 2.0141, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.22356361150741577, |
|
"learning_rate": 0.00020608789184552642, |
|
"loss": 2.0832, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.18460409343242645, |
|
"learning_rate": 0.00020544521932652144, |
|
"loss": 2.2919, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.2091461569070816, |
|
"learning_rate": 0.00020480285076089136, |
|
"loss": 1.8256, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.23582054674625397, |
|
"learning_rate": 0.00020416079053088391, |
|
"loss": 2.3861, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.24472753703594208, |
|
"learning_rate": 0.0002035190430166437, |
|
"loss": 2.4747, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.2311311960220337, |
|
"learning_rate": 0.00020287761259618164, |
|
"loss": 2.2835, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.25772756338119507, |
|
"learning_rate": 0.00020223650364534568, |
|
"loss": 2.2397, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.36200013756752014, |
|
"learning_rate": 0.00020159572053779043, |
|
"loss": 1.8731, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.21725964546203613, |
|
"learning_rate": 0.00020095526764494796, |
|
"loss": 2.3772, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.21761581301689148, |
|
"learning_rate": 0.00020031514933599727, |
|
"loss": 2.4379, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.22678567469120026, |
|
"learning_rate": 0.00019967536997783495, |
|
"loss": 2.1252, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.19343462586402893, |
|
"learning_rate": 0.00019903593393504542, |
|
"loss": 2.3515, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.24662891030311584, |
|
"learning_rate": 0.00019839684556987076, |
|
"loss": 2.1898, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.19625911116600037, |
|
"learning_rate": 0.00019775810924218125, |
|
"loss": 2.2743, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.25908294320106506, |
|
"learning_rate": 0.0001971197293094456, |
|
"loss": 2.4319, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.22597509622573853, |
|
"learning_rate": 0.0001964817101267013, |
|
"loss": 2.1773, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.23188504576683044, |
|
"learning_rate": 0.00019584405604652444, |
|
"loss": 2.3682, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.23394878208637238, |
|
"learning_rate": 0.00019520677141900093, |
|
"loss": 2.0693, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.20653656125068665, |
|
"learning_rate": 0.00019456986059169568, |
|
"loss": 2.2845, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.23298628628253937, |
|
"learning_rate": 0.00019393332790962402, |
|
"loss": 2.454, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.19534069299697876, |
|
"learning_rate": 0.00019329717771522108, |
|
"loss": 2.4001, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.19667889177799225, |
|
"learning_rate": 0.00019266141434831326, |
|
"loss": 2.3783, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.22638565301895142, |
|
"learning_rate": 0.00019202604214608744, |
|
"loss": 2.2708, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.24946092069149017, |
|
"learning_rate": 0.0001913910654430622, |
|
"loss": 2.4107, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.23316776752471924, |
|
"learning_rate": 0.00019075648857105825, |
|
"loss": 2.3116, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.20107421278953552, |
|
"learning_rate": 0.00019012231585916817, |
|
"loss": 2.2918, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.21125708520412445, |
|
"learning_rate": 0.00018948855163372782, |
|
"loss": 2.1628, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.23127441108226776, |
|
"learning_rate": 0.00018885520021828602, |
|
"loss": 2.1008, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.2128669172525406, |
|
"learning_rate": 0.00018822226593357563, |
|
"loss": 2.3034, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.20843705534934998, |
|
"learning_rate": 0.00018758975309748354, |
|
"loss": 2.4882, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.20661096274852753, |
|
"learning_rate": 0.00018695766602502196, |
|
"loss": 2.3373, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.23054824769496918, |
|
"learning_rate": 0.00018632600902829808, |
|
"loss": 2.542, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.2079750895500183, |
|
"learning_rate": 0.00018569478641648535, |
|
"loss": 1.9235, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.22011853754520416, |
|
"learning_rate": 0.00018506400249579383, |
|
"loss": 2.4797, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.19704569876194, |
|
"learning_rate": 0.00018443366156944068, |
|
"loss": 2.2134, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.20672884583473206, |
|
"learning_rate": 0.0001838037679376213, |
|
"loss": 2.2853, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.21415551006793976, |
|
"learning_rate": 0.00018317432589747915, |
|
"loss": 2.316, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.23471008241176605, |
|
"learning_rate": 0.0001825453397430773, |
|
"loss": 2.213, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.22792291641235352, |
|
"learning_rate": 0.00018191681376536844, |
|
"loss": 2.2338, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.20559385418891907, |
|
"learning_rate": 0.0001812887522521664, |
|
"loss": 2.4132, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.19658410549163818, |
|
"learning_rate": 0.00018066115948811595, |
|
"loss": 2.3406, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.27269044518470764, |
|
"learning_rate": 0.0001800340397546643, |
|
"loss": 2.0845, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.19628912210464478, |
|
"learning_rate": 0.00017940739733003148, |
|
"loss": 2.0894, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.20123037695884705, |
|
"learning_rate": 0.0001787812364891816, |
|
"loss": 2.2939, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.2504325211048126, |
|
"learning_rate": 0.00017815556150379297, |
|
"loss": 2.3354, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.211093932390213, |
|
"learning_rate": 0.0001775303766422298, |
|
"loss": 2.2469, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.22017726302146912, |
|
"learning_rate": 0.00017690568616951247, |
|
"loss": 2.0667, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.19698256254196167, |
|
"learning_rate": 0.00017628149434728858, |
|
"loss": 2.1873, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1986425369977951, |
|
"learning_rate": 0.0001756578054338041, |
|
"loss": 2.3461, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.23417946696281433, |
|
"learning_rate": 0.00017503462368387396, |
|
"loss": 2.1092, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.2174525111913681, |
|
"learning_rate": 0.00017441195334885341, |
|
"loss": 2.3174, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.1901862919330597, |
|
"learning_rate": 0.00017378979867660848, |
|
"loss": 2.341, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.22507810592651367, |
|
"learning_rate": 0.00017316816391148792, |
|
"loss": 2.0203, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.23076196014881134, |
|
"learning_rate": 0.00017254705329429302, |
|
"loss": 2.2094, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.2597287893295288, |
|
"learning_rate": 0.00017192647106224984, |
|
"loss": 2.2167, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.18957456946372986, |
|
"learning_rate": 0.00017130642144897942, |
|
"loss": 2.2965, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.208304300904274, |
|
"learning_rate": 0.00017068690868446957, |
|
"loss": 2.4638, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.23491379618644714, |
|
"learning_rate": 0.00017006793699504537, |
|
"loss": 2.607, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.22672338783740997, |
|
"learning_rate": 0.000169449510603341, |
|
"loss": 2.3012, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.23909202218055725, |
|
"learning_rate": 0.00016883163372827053, |
|
"loss": 2.2488, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.21005770564079285, |
|
"learning_rate": 0.00016821431058499896, |
|
"loss": 2.2493, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.212179496884346, |
|
"learning_rate": 0.00016759754538491422, |
|
"loss": 1.8298, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.23341526091098785, |
|
"learning_rate": 0.00016698134233559736, |
|
"loss": 2.0907, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.2126818150281906, |
|
"learning_rate": 0.000166365705640795, |
|
"loss": 2.229, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.22661077976226807, |
|
"learning_rate": 0.00016575063950038962, |
|
"loss": 2.3294, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.21582266688346863, |
|
"learning_rate": 0.00016513614811037168, |
|
"loss": 2.2743, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.21785694360733032, |
|
"learning_rate": 0.00016452223566281035, |
|
"loss": 2.2205, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.21953895688056946, |
|
"learning_rate": 0.00016390890634582572, |
|
"loss": 2.386, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.23978057503700256, |
|
"learning_rate": 0.00016329616434355933, |
|
"loss": 2.3754, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.22289539873600006, |
|
"learning_rate": 0.0001626840138361462, |
|
"loss": 2.3556, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.2133941650390625, |
|
"learning_rate": 0.00016207245899968633, |
|
"loss": 2.2085, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.22433440387248993, |
|
"learning_rate": 0.00016146150400621565, |
|
"loss": 2.3706, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.22882284224033356, |
|
"learning_rate": 0.00016085115302367844, |
|
"loss": 2.175, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.21862849593162537, |
|
"learning_rate": 0.000160241410215898, |
|
"loss": 2.405, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.2291046679019928, |
|
"learning_rate": 0.00015963227974254891, |
|
"loss": 2.3178, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.25386229157447815, |
|
"learning_rate": 0.00015902376575912814, |
|
"loss": 2.2498, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.23087728023529053, |
|
"learning_rate": 0.0001584158724169273, |
|
"loss": 2.0111, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.35551658272743225, |
|
"learning_rate": 0.00015780860386300366, |
|
"loss": 2.0455, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.2827921509742737, |
|
"learning_rate": 0.00015720196424015237, |
|
"loss": 2.0207, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.21254152059555054, |
|
"learning_rate": 0.00015659595768687787, |
|
"loss": 2.3726, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.24501194059848785, |
|
"learning_rate": 0.0001559905883373659, |
|
"loss": 2.0826, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.25266799330711365, |
|
"learning_rate": 0.00015538586032145523, |
|
"loss": 2.4135, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.21061569452285767, |
|
"learning_rate": 0.00015478177776460922, |
|
"loss": 2.2499, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.2409357875585556, |
|
"learning_rate": 0.00015417834478788817, |
|
"loss": 2.1908, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.2231035977602005, |
|
"learning_rate": 0.00015357556550792064, |
|
"loss": 2.3146, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.21707406640052795, |
|
"learning_rate": 0.00015297344403687594, |
|
"loss": 2.0881, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.25137150287628174, |
|
"learning_rate": 0.00015237198448243565, |
|
"loss": 2.3653, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.21213415265083313, |
|
"learning_rate": 0.0001517711909477658, |
|
"loss": 2.0724, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.22044523060321808, |
|
"learning_rate": 0.00015117106753148864, |
|
"loss": 2.3951, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.21106037497520447, |
|
"learning_rate": 0.0001505716183276553, |
|
"loss": 2.3063, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.19864067435264587, |
|
"learning_rate": 0.00014997284742571687, |
|
"loss": 2.3512, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.2682659327983856, |
|
"learning_rate": 0.00014937475891049754, |
|
"loss": 2.1263, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.22299188375473022, |
|
"learning_rate": 0.0001487773568621659, |
|
"loss": 2.1646, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.1971612125635147, |
|
"learning_rate": 0.00014818064535620755, |
|
"loss": 2.2026, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.230462908744812, |
|
"learning_rate": 0.00014758462846339748, |
|
"loss": 2.0656, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.21233803033828735, |
|
"learning_rate": 0.00014698931024977152, |
|
"loss": 2.0036, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.22545789182186127, |
|
"learning_rate": 0.0001463946947765995, |
|
"loss": 2.3219, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.25745728611946106, |
|
"learning_rate": 0.0001458007861003568, |
|
"loss": 2.1843, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.22463242709636688, |
|
"learning_rate": 0.00014520758827269762, |
|
"loss": 2.253, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.221448615193367, |
|
"learning_rate": 0.00014461510534042593, |
|
"loss": 2.2945, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.2240522801876068, |
|
"learning_rate": 0.0001440233413454693, |
|
"loss": 2.495, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.2830246090888977, |
|
"learning_rate": 0.0001434323003248504, |
|
"loss": 2.1455, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.22345268726348877, |
|
"learning_rate": 0.0001428419863106599, |
|
"loss": 2.1584, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.2145606279373169, |
|
"learning_rate": 0.0001422524033300287, |
|
"loss": 2.2282, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.20531050860881805, |
|
"learning_rate": 0.00014166355540510072, |
|
"loss": 2.3778, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.20582695305347443, |
|
"learning_rate": 0.00014107544655300519, |
|
"loss": 2.364, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.2178577184677124, |
|
"learning_rate": 0.00014048808078582942, |
|
"loss": 2.533, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.2206876426935196, |
|
"learning_rate": 0.0001399014621105914, |
|
"loss": 2.5413, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.24441972374916077, |
|
"learning_rate": 0.00013931559452921247, |
|
"loss": 2.0963, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.1869293451309204, |
|
"learning_rate": 0.00013873048203849, |
|
"loss": 2.0351, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.21759897470474243, |
|
"learning_rate": 0.00013814612863006985, |
|
"loss": 2.1202, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.2100670337677002, |
|
"learning_rate": 0.00013756253829041998, |
|
"loss": 2.3812, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.203419029712677, |
|
"learning_rate": 0.00013697971500080207, |
|
"loss": 2.4032, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.24871662259101868, |
|
"learning_rate": 0.00013639766273724542, |
|
"loss": 2.2387, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.24824783205986023, |
|
"learning_rate": 0.0001358163854705191, |
|
"loss": 2.2322, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.21718545258045197, |
|
"learning_rate": 0.0001352358871661052, |
|
"loss": 2.3572, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.22530969977378845, |
|
"learning_rate": 0.00013465617178417202, |
|
"loss": 2.2706, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.3539257049560547, |
|
"learning_rate": 0.0001340772432795463, |
|
"loss": 2.2439, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.22788652777671814, |
|
"learning_rate": 0.00013349910560168688, |
|
"loss": 2.2209, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.24959754943847656, |
|
"learning_rate": 0.0001329217626946576, |
|
"loss": 2.1656, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.2394050508737564, |
|
"learning_rate": 0.0001323452184971004, |
|
"loss": 2.2512, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.207530215382576, |
|
"learning_rate": 0.00013176947694220828, |
|
"loss": 2.3349, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.2458074539899826, |
|
"learning_rate": 0.0001311945419576988, |
|
"loss": 2.3544, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.23957344889640808, |
|
"learning_rate": 0.00013062041746578668, |
|
"loss": 2.1734, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.23613759875297546, |
|
"learning_rate": 0.00013004710738315805, |
|
"loss": 2.1414, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.20104865729808807, |
|
"learning_rate": 0.0001294746156209425, |
|
"loss": 2.2497, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.2077418565750122, |
|
"learning_rate": 0.00012890294608468764, |
|
"loss": 2.0288, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.190739706158638, |
|
"learning_rate": 0.00012833210267433127, |
|
"loss": 2.1426, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.21029919385910034, |
|
"learning_rate": 0.00012776208928417558, |
|
"loss": 2.3764, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.22516466677188873, |
|
"learning_rate": 0.0001271929098028607, |
|
"loss": 2.3506, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.25254198908805847, |
|
"learning_rate": 0.00012662456811333718, |
|
"loss": 2.3135, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.23021924495697021, |
|
"learning_rate": 0.00012605706809284056, |
|
"loss": 2.0895, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.2016134113073349, |
|
"learning_rate": 0.00012549041361286438, |
|
"loss": 2.3918, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.22848759591579437, |
|
"learning_rate": 0.000124924608539134, |
|
"loss": 2.0874, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.24023045599460602, |
|
"learning_rate": 0.00012435965673157992, |
|
"loss": 2.2176, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.2871743440628052, |
|
"learning_rate": 0.00012379556204431184, |
|
"loss": 2.0991, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.24772612750530243, |
|
"learning_rate": 0.000123232328325592, |
|
"loss": 2.3852, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.27144259214401245, |
|
"learning_rate": 0.00012266995941780933, |
|
"loss": 2.2463, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.22941002249717712, |
|
"learning_rate": 0.0001221084591574526, |
|
"loss": 1.9907, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.19429859519004822, |
|
"learning_rate": 0.00012154783137508513, |
|
"loss": 2.3679, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.2515440881252289, |
|
"learning_rate": 0.00012098807989531804, |
|
"loss": 2.2745, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.22819548845291138, |
|
"learning_rate": 0.00012042920853678393, |
|
"loss": 2.3092, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.23505914211273193, |
|
"learning_rate": 0.00011987122111211185, |
|
"loss": 2.237, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.208960622549057, |
|
"learning_rate": 0.00011931412142790005, |
|
"loss": 2.1689, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.24931026995182037, |
|
"learning_rate": 0.00011875791328469088, |
|
"loss": 2.2817, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.2663825452327728, |
|
"learning_rate": 0.0001182026004769446, |
|
"loss": 2.4229, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.32719677686691284, |
|
"learning_rate": 0.0001176481867930134, |
|
"loss": 2.2895, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.19801324605941772, |
|
"learning_rate": 0.00011709467601511565, |
|
"loss": 2.2313, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.2740652859210968, |
|
"learning_rate": 0.0001165420719193101, |
|
"loss": 2.4916, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.228300541639328, |
|
"learning_rate": 0.0001159903782754701, |
|
"loss": 1.8949, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.22289882600307465, |
|
"learning_rate": 0.00011543959884725793, |
|
"loss": 2.32, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.2218303680419922, |
|
"learning_rate": 0.00011488973739209879, |
|
"loss": 2.279, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.2361016869544983, |
|
"learning_rate": 0.00011434079766115594, |
|
"loss": 2.4161, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.2459881603717804, |
|
"learning_rate": 0.0001137927833993043, |
|
"loss": 2.2137, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.205433651804924, |
|
"learning_rate": 0.00011324569834510523, |
|
"loss": 2.4675, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.20708201825618744, |
|
"learning_rate": 0.00011269954623078127, |
|
"loss": 2.4206, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.20594294369220734, |
|
"learning_rate": 0.0001121543307821899, |
|
"loss": 2.1661, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.21572817862033844, |
|
"learning_rate": 0.0001116100557187994, |
|
"loss": 2.3033, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.27235686779022217, |
|
"learning_rate": 0.00011106672475366203, |
|
"loss": 2.4498, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.208398699760437, |
|
"learning_rate": 0.0001105243415933898, |
|
"loss": 2.3131, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2954976260662079, |
|
"learning_rate": 0.0001099829099381287, |
|
"loss": 2.0314, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2156749665737152, |
|
"learning_rate": 0.00010944243348153352, |
|
"loss": 2.2011, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.21214473247528076, |
|
"learning_rate": 0.00010890291591074262, |
|
"loss": 2.4135, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.18999165296554565, |
|
"learning_rate": 0.00010836436090635288, |
|
"loss": 2.3648, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.2245589941740036, |
|
"learning_rate": 0.00010782677214239456, |
|
"loss": 2.2032, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.21589384973049164, |
|
"learning_rate": 0.00010729015328630617, |
|
"loss": 2.2427, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.23020033538341522, |
|
"learning_rate": 0.00010675450799890951, |
|
"loss": 2.4896, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.21616196632385254, |
|
"learning_rate": 0.00010621983993438467, |
|
"loss": 2.1164, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.214460089802742, |
|
"learning_rate": 0.0001056861527402452, |
|
"loss": 2.3076, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.217409148812294, |
|
"learning_rate": 0.00010515345005731283, |
|
"loss": 2.3753, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.2724614143371582, |
|
"learning_rate": 0.00010462173551969348, |
|
"loss": 2.409, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.21833041310310364, |
|
"learning_rate": 0.00010409101275475139, |
|
"loss": 2.4145, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.22455398738384247, |
|
"learning_rate": 0.00010356128538308519, |
|
"loss": 2.4656, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.24207107722759247, |
|
"learning_rate": 0.00010303255701850287, |
|
"loss": 2.5042, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.21409133076667786, |
|
"learning_rate": 0.00010250483126799711, |
|
"loss": 2.4075, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.27256593108177185, |
|
"learning_rate": 0.00010197811173172106, |
|
"loss": 2.1344, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.2253335416316986, |
|
"learning_rate": 0.00010145240200296296, |
|
"loss": 2.095, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.22114749252796173, |
|
"learning_rate": 0.00010092770566812245, |
|
"loss": 2.3584, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.20449167490005493, |
|
"learning_rate": 0.00010040402630668575, |
|
"loss": 2.0765, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.2127346694469452, |
|
"learning_rate": 9.988136749120117e-05, |
|
"loss": 2.4296, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.24239586293697357, |
|
"learning_rate": 9.935973278725497e-05, |
|
"loss": 2.2133, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.20855852961540222, |
|
"learning_rate": 9.883912575344689e-05, |
|
"loss": 2.1825, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.22541500627994537, |
|
"learning_rate": 9.831954994136558e-05, |
|
"loss": 2.4416, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.21852771937847137, |
|
"learning_rate": 9.780100889556528e-05, |
|
"loss": 2.1338, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.2524181306362152, |
|
"learning_rate": 9.728350615354037e-05, |
|
"loss": 2.2715, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.223457932472229, |
|
"learning_rate": 9.676704524570257e-05, |
|
"loss": 2.1475, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.20966559648513794, |
|
"learning_rate": 9.625162969535562e-05, |
|
"loss": 2.1721, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.257662296295166, |
|
"learning_rate": 9.5737263018672e-05, |
|
"loss": 2.5223, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.21245898306369781, |
|
"learning_rate": 9.522394872466913e-05, |
|
"loss": 2.5489, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.2315073013305664, |
|
"learning_rate": 9.471169031518445e-05, |
|
"loss": 2.2782, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.21125444769859314, |
|
"learning_rate": 9.420049128485253e-05, |
|
"loss": 2.2031, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.21013091504573822, |
|
"learning_rate": 9.36903551210807e-05, |
|
"loss": 1.8751, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.25472453236579895, |
|
"learning_rate": 9.318128530402548e-05, |
|
"loss": 2.1909, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.20788568258285522, |
|
"learning_rate": 9.26732853065687e-05, |
|
"loss": 2.1907, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.22828350961208344, |
|
"learning_rate": 9.216635859429389e-05, |
|
"loss": 2.173, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.2420109063386917, |
|
"learning_rate": 9.16605086254626e-05, |
|
"loss": 2.2958, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.39677414298057556, |
|
"learning_rate": 9.11557388509909e-05, |
|
"loss": 2.0855, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.29712197184562683, |
|
"learning_rate": 9.065205271442542e-05, |
|
"loss": 2.4342, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.22807803750038147, |
|
"learning_rate": 9.014945365192074e-05, |
|
"loss": 2.1303, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.22611750662326813, |
|
"learning_rate": 8.964794509221508e-05, |
|
"loss": 2.3752, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.1972074657678604, |
|
"learning_rate": 8.914753045660706e-05, |
|
"loss": 2.2315, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.23331503570079803, |
|
"learning_rate": 8.864821315893307e-05, |
|
"loss": 2.0982, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.20077547430992126, |
|
"learning_rate": 8.814999660554279e-05, |
|
"loss": 2.3864, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.21390070021152496, |
|
"learning_rate": 8.765288419527725e-05, |
|
"loss": 2.4906, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.24563802778720856, |
|
"learning_rate": 8.715687931944449e-05, |
|
"loss": 2.3063, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.18615995347499847, |
|
"learning_rate": 8.666198536179718e-05, |
|
"loss": 2.4023, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.2110363095998764, |
|
"learning_rate": 8.616820569850922e-05, |
|
"loss": 2.445, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.21250484883785248, |
|
"learning_rate": 8.567554369815286e-05, |
|
"loss": 2.2042, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.21482475101947784, |
|
"learning_rate": 8.518400272167554e-05, |
|
"loss": 2.0429, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.2096213549375534, |
|
"learning_rate": 8.46935861223771e-05, |
|
"loss": 2.3407, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.2219087928533554, |
|
"learning_rate": 8.420429724588682e-05, |
|
"loss": 2.2471, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.22424234449863434, |
|
"learning_rate": 8.371613943014076e-05, |
|
"loss": 2.443, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.23273713886737823, |
|
"learning_rate": 8.32291160053587e-05, |
|
"loss": 2.0754, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.20582637190818787, |
|
"learning_rate": 8.274323029402173e-05, |
|
"loss": 2.3168, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.24268639087677002, |
|
"learning_rate": 8.225848561084942e-05, |
|
"loss": 2.3469, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.27532529830932617, |
|
"learning_rate": 8.177488526277696e-05, |
|
"loss": 2.1499, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.19710592925548553, |
|
"learning_rate": 8.129243254893343e-05, |
|
"loss": 2.3333, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.22231920063495636, |
|
"learning_rate": 8.081113076061816e-05, |
|
"loss": 2.617, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.24639876186847687, |
|
"learning_rate": 8.033098318127919e-05, |
|
"loss": 2.3569, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.2539924383163452, |
|
"learning_rate": 7.985199308649038e-05, |
|
"loss": 2.3275, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.2054421603679657, |
|
"learning_rate": 7.937416374392931e-05, |
|
"loss": 2.2305, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.21331799030303955, |
|
"learning_rate": 7.88974984133548e-05, |
|
"loss": 2.4914, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.2482718825340271, |
|
"learning_rate": 7.842200034658486e-05, |
|
"loss": 2.4796, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.2408265769481659, |
|
"learning_rate": 7.794767278747434e-05, |
|
"loss": 2.246, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.2019105851650238, |
|
"learning_rate": 7.747451897189287e-05, |
|
"loss": 2.4315, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.23757004737854004, |
|
"learning_rate": 7.70025421277028e-05, |
|
"loss": 2.3275, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.21571137011051178, |
|
"learning_rate": 7.653174547473717e-05, |
|
"loss": 2.1763, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.24419759213924408, |
|
"learning_rate": 7.606213222477784e-05, |
|
"loss": 2.1517, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.23553812503814697, |
|
"learning_rate": 7.559370558153308e-05, |
|
"loss": 2.2369, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.25342613458633423, |
|
"learning_rate": 7.512646874061677e-05, |
|
"loss": 2.1151, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.21691171824932098, |
|
"learning_rate": 7.466042488952521e-05, |
|
"loss": 2.121, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.2163485735654831, |
|
"learning_rate": 7.419557720761678e-05, |
|
"loss": 2.3613, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.23227451741695404, |
|
"learning_rate": 7.373192886608907e-05, |
|
"loss": 2.351, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.2794537842273712, |
|
"learning_rate": 7.326948302795792e-05, |
|
"loss": 2.2958, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.25960350036621094, |
|
"learning_rate": 7.280824284803595e-05, |
|
"loss": 2.3849, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.2537117898464203, |
|
"learning_rate": 7.234821147291023e-05, |
|
"loss": 2.2611, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.2555319666862488, |
|
"learning_rate": 7.188939204092177e-05, |
|
"loss": 2.2563, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.2340226173400879, |
|
"learning_rate": 7.143178768214348e-05, |
|
"loss": 2.2398, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.18839353322982788, |
|
"learning_rate": 7.097540151835913e-05, |
|
"loss": 1.9488, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.2070079892873764, |
|
"learning_rate": 7.052023666304189e-05, |
|
"loss": 2.3041, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.20171380043029785, |
|
"learning_rate": 7.006629622133318e-05, |
|
"loss": 2.2266, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.22852955758571625, |
|
"learning_rate": 6.961358329002134e-05, |
|
"loss": 2.2571, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.2285429835319519, |
|
"learning_rate": 6.916210095752091e-05, |
|
"loss": 2.0589, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.22552116215229034, |
|
"learning_rate": 6.871185230385072e-05, |
|
"loss": 2.2549, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.3575563132762909, |
|
"learning_rate": 6.826284040061407e-05, |
|
"loss": 2.1802, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.20591920614242554, |
|
"learning_rate": 6.781506831097678e-05, |
|
"loss": 2.4576, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.23244622349739075, |
|
"learning_rate": 6.736853908964647e-05, |
|
"loss": 2.4535, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.23467615246772766, |
|
"learning_rate": 6.692325578285241e-05, |
|
"loss": 2.3651, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.2762286365032196, |
|
"learning_rate": 6.647922142832366e-05, |
|
"loss": 2.321, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.23728686571121216, |
|
"learning_rate": 6.603643905526947e-05, |
|
"loss": 2.2093, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.24722512066364288, |
|
"learning_rate": 6.559491168435749e-05, |
|
"loss": 2.2426, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.1793704330921173, |
|
"learning_rate": 6.51546423276941e-05, |
|
"loss": 2.0517, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.2555958926677704, |
|
"learning_rate": 6.47156339888034e-05, |
|
"loss": 2.1139, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.23529459536075592, |
|
"learning_rate": 6.427788966260675e-05, |
|
"loss": 2.2764, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.24994830787181854, |
|
"learning_rate": 6.384141233540248e-05, |
|
"loss": 2.135, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.21067097783088684, |
|
"learning_rate": 6.340620498484542e-05, |
|
"loss": 2.2185, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.2730359435081482, |
|
"learning_rate": 6.297227057992638e-05, |
|
"loss": 1.9392, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.22600042819976807, |
|
"learning_rate": 6.253961208095255e-05, |
|
"loss": 2.2381, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.20506155490875244, |
|
"learning_rate": 6.210823243952654e-05, |
|
"loss": 2.2401, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.21911026537418365, |
|
"learning_rate": 6.167813459852665e-05, |
|
"loss": 2.349, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.3007972240447998, |
|
"learning_rate": 6.124932149208681e-05, |
|
"loss": 2.3174, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.2639479339122772, |
|
"learning_rate": 6.082179604557617e-05, |
|
"loss": 2.191, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.35899296402931213, |
|
"learning_rate": 6.0395561175579984e-05, |
|
"loss": 2.3853, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.22475650906562805, |
|
"learning_rate": 5.997061978987861e-05, |
|
"loss": 2.6994, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.22194553911685944, |
|
"learning_rate": 5.95469747874286e-05, |
|
"loss": 2.3443, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.2621094286441803, |
|
"learning_rate": 5.912462905834243e-05, |
|
"loss": 2.3218, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.22809450328350067, |
|
"learning_rate": 5.8703585483868923e-05, |
|
"loss": 2.0625, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.24936749041080475, |
|
"learning_rate": 5.828384693637356e-05, |
|
"loss": 2.2313, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.22074222564697266, |
|
"learning_rate": 5.786541627931899e-05, |
|
"loss": 2.4042, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.2226363569498062, |
|
"learning_rate": 5.744829636724536e-05, |
|
"loss": 2.2453, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.23048317432403564, |
|
"learning_rate": 5.703249004575087e-05, |
|
"loss": 2.2149, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.26077502965927124, |
|
"learning_rate": 5.661800015147245e-05, |
|
"loss": 2.4554, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.23060084879398346, |
|
"learning_rate": 5.620482951206626e-05, |
|
"loss": 2.0871, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.23240596055984497, |
|
"learning_rate": 5.579298094618865e-05, |
|
"loss": 2.068, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.25822100043296814, |
|
"learning_rate": 5.5382457263476404e-05, |
|
"loss": 2.3347, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.2152366042137146, |
|
"learning_rate": 5.497326126452848e-05, |
|
"loss": 2.2502, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.22713406383991241, |
|
"learning_rate": 5.4565395740885784e-05, |
|
"loss": 2.2203, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.19853945076465607, |
|
"learning_rate": 5.4158863475013196e-05, |
|
"loss": 2.3458, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.20661340653896332, |
|
"learning_rate": 5.3753667240279716e-05, |
|
"loss": 2.2679, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.2174801528453827, |
|
"learning_rate": 5.334980980094004e-05, |
|
"loss": 2.1825, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.24562963843345642, |
|
"learning_rate": 5.2947293912115836e-05, |
|
"loss": 2.5649, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.19424057006835938, |
|
"learning_rate": 5.254612231977624e-05, |
|
"loss": 2.1822, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.220481738448143, |
|
"learning_rate": 5.214629776071986e-05, |
|
"loss": 2.2884, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.2209591120481491, |
|
"learning_rate": 5.174782296255581e-05, |
|
"loss": 2.4984, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.23579350113868713, |
|
"learning_rate": 5.135070064368508e-05, |
|
"loss": 2.2501, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.2045588195323944, |
|
"learning_rate": 5.095493351328201e-05, |
|
"loss": 2.1386, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.23356984555721283, |
|
"learning_rate": 5.0560524271275804e-05, |
|
"loss": 2.087, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.24123229086399078, |
|
"learning_rate": 5.016747560833221e-05, |
|
"loss": 2.3423, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.23942683637142181, |
|
"learning_rate": 4.977579020583509e-05, |
|
"loss": 2.2332, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.22866415977478027, |
|
"learning_rate": 4.938547073586783e-05, |
|
"loss": 2.3039, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.2132473886013031, |
|
"learning_rate": 4.8996519861195964e-05, |
|
"loss": 2.1985, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.24595317244529724, |
|
"learning_rate": 4.860894023524792e-05, |
|
"loss": 2.4184, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.21361759305000305, |
|
"learning_rate": 4.822273450209766e-05, |
|
"loss": 2.2322, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.2093634456396103, |
|
"learning_rate": 4.78379052964466e-05, |
|
"loss": 2.283, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.2636638283729553, |
|
"learning_rate": 4.745445524360506e-05, |
|
"loss": 2.1172, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.23667100071907043, |
|
"learning_rate": 4.707238695947522e-05, |
|
"loss": 2.2553, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.20943553745746613, |
|
"learning_rate": 4.669170305053239e-05, |
|
"loss": 2.2184, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.22824349999427795, |
|
"learning_rate": 4.631240611380793e-05, |
|
"loss": 2.2645, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.18925786018371582, |
|
"learning_rate": 4.593449873687117e-05, |
|
"loss": 2.3492, |
|
"step": 4835 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.21233032643795013, |
|
"learning_rate": 4.555798349781185e-05, |
|
"loss": 2.2667, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.25917020440101624, |
|
"learning_rate": 4.518286296522248e-05, |
|
"loss": 2.3071, |
|
"step": 4845 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.25229182839393616, |
|
"learning_rate": 4.4809139698180985e-05, |
|
"loss": 2.3561, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.2252894639968872, |
|
"learning_rate": 4.4436816246232855e-05, |
|
"loss": 2.5364, |
|
"step": 4855 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.2240176647901535, |
|
"learning_rate": 4.406589514937437e-05, |
|
"loss": 2.2504, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.2048347145318985, |
|
"learning_rate": 4.369637893803474e-05, |
|
"loss": 2.243, |
|
"step": 4865 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.21030034124851227, |
|
"learning_rate": 4.3328270133058984e-05, |
|
"loss": 2.2484, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.2221430093050003, |
|
"learning_rate": 4.2961571245690945e-05, |
|
"loss": 2.1924, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.22565896809101105, |
|
"learning_rate": 4.2596284777555646e-05, |
|
"loss": 2.2848, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.22470615804195404, |
|
"learning_rate": 4.223241322064306e-05, |
|
"loss": 2.2022, |
|
"step": 4885 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.3345583975315094, |
|
"learning_rate": 4.186995905729013e-05, |
|
"loss": 2.195, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.24246099591255188, |
|
"learning_rate": 4.1508924760164654e-05, |
|
"loss": 2.2407, |
|
"step": 4895 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.2674398124217987, |
|
"learning_rate": 4.1149312792247956e-05, |
|
"loss": 2.2382, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.23504556715488434, |
|
"learning_rate": 4.079112560681816e-05, |
|
"loss": 2.2465, |
|
"step": 4905 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.26757216453552246, |
|
"learning_rate": 4.043436564743361e-05, |
|
"loss": 2.1608, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.2398817539215088, |
|
"learning_rate": 4.007903534791604e-05, |
|
"loss": 2.3138, |
|
"step": 4915 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.21942365169525146, |
|
"learning_rate": 3.9725137132334e-05, |
|
"loss": 2.1774, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.33368757367134094, |
|
"learning_rate": 3.937267341498641e-05, |
|
"loss": 1.9173, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.2571778893470764, |
|
"learning_rate": 3.90216466003859e-05, |
|
"loss": 2.1053, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.21170833706855774, |
|
"learning_rate": 3.86720590832427e-05, |
|
"loss": 2.1862, |
|
"step": 4935 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.2919519543647766, |
|
"learning_rate": 3.8323913248448055e-05, |
|
"loss": 2.0265, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.27031105756759644, |
|
"learning_rate": 3.7977211471057814e-05, |
|
"loss": 2.3355, |
|
"step": 4945 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.22384324669837952, |
|
"learning_rate": 3.763195611627693e-05, |
|
"loss": 2.5411, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.24857838451862335, |
|
"learning_rate": 3.728814953944229e-05, |
|
"loss": 2.1088, |
|
"step": 4955 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.2427876591682434, |
|
"learning_rate": 3.69457940860077e-05, |
|
"loss": 2.3843, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.24170845746994019, |
|
"learning_rate": 3.660489209152701e-05, |
|
"loss": 2.2145, |
|
"step": 4965 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.23208285868167877, |
|
"learning_rate": 3.6265445881638685e-05, |
|
"loss": 1.9527, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2421301156282425, |
|
"learning_rate": 3.592745777204981e-05, |
|
"loss": 2.3853, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.21723875403404236, |
|
"learning_rate": 3.5590930068520324e-05, |
|
"loss": 2.2972, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.23736509680747986, |
|
"learning_rate": 3.525586506684719e-05, |
|
"loss": 2.3117, |
|
"step": 4985 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.229548841714859, |
|
"learning_rate": 3.492226505284879e-05, |
|
"loss": 2.3366, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2537176311016083, |
|
"learning_rate": 3.459013230234945e-05, |
|
"loss": 2.3424, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2257246971130371, |
|
"learning_rate": 3.425946908116365e-05, |
|
"loss": 2.3804, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2141430377960205, |
|
"learning_rate": 3.3930277645080804e-05, |
|
"loss": 2.1892, |
|
"step": 5005 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.26059871912002563, |
|
"learning_rate": 3.3602560239849786e-05, |
|
"loss": 2.1947, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.22793832421302795, |
|
"learning_rate": 3.3276319101163636e-05, |
|
"loss": 2.4303, |
|
"step": 5015 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.2286398559808731, |
|
"learning_rate": 3.295155645464412e-05, |
|
"loss": 2.2965, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.26674941182136536, |
|
"learning_rate": 3.262827451582701e-05, |
|
"loss": 2.1779, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.2102755457162857, |
|
"learning_rate": 3.230647549014634e-05, |
|
"loss": 2.4149, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.23954099416732788, |
|
"learning_rate": 3.1986161572919806e-05, |
|
"loss": 2.3689, |
|
"step": 5035 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.22200442850589752, |
|
"learning_rate": 3.1667334949333935e-05, |
|
"loss": 2.5024, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.23744402825832367, |
|
"learning_rate": 3.13499977944284e-05, |
|
"loss": 2.4955, |
|
"step": 5045 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.26780271530151367, |
|
"learning_rate": 3.1034152273082233e-05, |
|
"loss": 1.8371, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.22308044135570526, |
|
"learning_rate": 3.07198005399981e-05, |
|
"loss": 2.42, |
|
"step": 5055 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.22626711428165436, |
|
"learning_rate": 3.0406944739688247e-05, |
|
"loss": 1.995, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.2728048264980316, |
|
"learning_rate": 3.0095587006459563e-05, |
|
"loss": 2.1475, |
|
"step": 5065 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.22108417749404907, |
|
"learning_rate": 2.978572946439914e-05, |
|
"loss": 2.263, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.21562892198562622, |
|
"learning_rate": 2.9477374227359715e-05, |
|
"loss": 2.2598, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.21353229880332947, |
|
"learning_rate": 2.917052339894538e-05, |
|
"loss": 2.2058, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.24220535159111023, |
|
"learning_rate": 2.8865179072496834e-05, |
|
"loss": 2.1816, |
|
"step": 5085 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.21273867785930634, |
|
"learning_rate": 2.8561343331077794e-05, |
|
"loss": 2.4305, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.2410360872745514, |
|
"learning_rate": 2.8259018247460104e-05, |
|
"loss": 2.3677, |
|
"step": 5095 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.23682256042957306, |
|
"learning_rate": 2.7958205884110022e-05, |
|
"loss": 2.2552, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.2274460345506668, |
|
"learning_rate": 2.765890829317397e-05, |
|
"loss": 2.1807, |
|
"step": 5105 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.21735264360904694, |
|
"learning_rate": 2.7361127516464413e-05, |
|
"loss": 2.3206, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.2178596407175064, |
|
"learning_rate": 2.7064865585446435e-05, |
|
"loss": 2.4849, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.2024727314710617, |
|
"learning_rate": 2.6770124521223156e-05, |
|
"loss": 2.2573, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.23709452152252197, |
|
"learning_rate": 2.6476906334522544e-05, |
|
"loss": 2.4347, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.24053430557250977, |
|
"learning_rate": 2.6185213025683442e-05, |
|
"loss": 2.3239, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.24593400955200195, |
|
"learning_rate": 2.5895046584641925e-05, |
|
"loss": 1.9137, |
|
"step": 5135 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.21003013849258423, |
|
"learning_rate": 2.5606408990917734e-05, |
|
"loss": 2.3909, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.22038382291793823, |
|
"learning_rate": 2.531930221360082e-05, |
|
"loss": 2.2642, |
|
"step": 5145 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.21890124678611755, |
|
"learning_rate": 2.50337282113379e-05, |
|
"loss": 2.248, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.20427949726581573, |
|
"learning_rate": 2.4749688932319046e-05, |
|
"loss": 2.2361, |
|
"step": 5155 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.18769937753677368, |
|
"learning_rate": 2.4467186314264423e-05, |
|
"loss": 2.3167, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.21898379921913147, |
|
"learning_rate": 2.4186222284411115e-05, |
|
"loss": 2.1296, |
|
"step": 5165 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.2148793488740921, |
|
"learning_rate": 2.3906798759499916e-05, |
|
"loss": 2.2478, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.21558383107185364, |
|
"learning_rate": 2.362891764576211e-05, |
|
"loss": 2.2518, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.1823437213897705, |
|
"learning_rate": 2.335258083890701e-05, |
|
"loss": 1.9974, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.214274063706398, |
|
"learning_rate": 2.307779022410819e-05, |
|
"loss": 2.3863, |
|
"step": 5185 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.2402074933052063, |
|
"learning_rate": 2.2804547675991493e-05, |
|
"loss": 2.2795, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.23359552025794983, |
|
"learning_rate": 2.2532855058621486e-05, |
|
"loss": 2.1839, |
|
"step": 5195 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.1988850086927414, |
|
"learning_rate": 2.226271422548931e-05, |
|
"loss": 2.3525, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.23410113155841827, |
|
"learning_rate": 2.199412701949974e-05, |
|
"loss": 2.2221, |
|
"step": 5205 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.28235799074172974, |
|
"learning_rate": 2.1727095272958658e-05, |
|
"loss": 2.3737, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.19444677233695984, |
|
"learning_rate": 2.1461620807560588e-05, |
|
"loss": 2.0633, |
|
"step": 5215 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.2312103509902954, |
|
"learning_rate": 2.1197705434376323e-05, |
|
"loss": 2.4128, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.24011710286140442, |
|
"learning_rate": 2.0935350953840386e-05, |
|
"loss": 2.0844, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.20998087525367737, |
|
"learning_rate": 2.0674559155738986e-05, |
|
"loss": 2.4681, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.23423248529434204, |
|
"learning_rate": 2.0415331819197634e-05, |
|
"loss": 2.3379, |
|
"step": 5235 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.25770696997642517, |
|
"learning_rate": 2.015767071266908e-05, |
|
"loss": 2.329, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.22478801012039185, |
|
"learning_rate": 1.9901577593921193e-05, |
|
"loss": 2.2112, |
|
"step": 5245 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.22718951106071472, |
|
"learning_rate": 1.9647054210024968e-05, |
|
"loss": 2.4007, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.2245934009552002, |
|
"learning_rate": 1.9394102297342842e-05, |
|
"loss": 1.9822, |
|
"step": 5255 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.25233885645866394, |
|
"learning_rate": 1.9142723581516418e-05, |
|
"loss": 1.8663, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.22092613577842712, |
|
"learning_rate": 1.8892919777455054e-05, |
|
"loss": 2.24, |
|
"step": 5265 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.2314857840538025, |
|
"learning_rate": 1.864469258932397e-05, |
|
"loss": 2.3645, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.22606311738491058, |
|
"learning_rate": 1.8398043710532748e-05, |
|
"loss": 2.103, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.2500253915786743, |
|
"learning_rate": 1.8152974823723772e-05, |
|
"loss": 2.0572, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.21233054995536804, |
|
"learning_rate": 1.7909487600760526e-05, |
|
"loss": 2.4147, |
|
"step": 5285 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.27136868238449097, |
|
"learning_rate": 1.766758370271651e-05, |
|
"loss": 2.4119, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.21505457162857056, |
|
"learning_rate": 1.74272647798637e-05, |
|
"loss": 2.0964, |
|
"step": 5295 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.23759673535823822, |
|
"learning_rate": 1.718853247166133e-05, |
|
"loss": 2.4041, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.1982789784669876, |
|
"learning_rate": 1.6951388406744732e-05, |
|
"loss": 2.308, |
|
"step": 5305 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.2191469967365265, |
|
"learning_rate": 1.671583420291434e-05, |
|
"loss": 2.1455, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.21145908534526825, |
|
"learning_rate": 1.6481871467124267e-05, |
|
"loss": 2.2348, |
|
"step": 5315 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.2330002337694168, |
|
"learning_rate": 1.6249501795471938e-05, |
|
"loss": 2.1234, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.22962640225887299, |
|
"learning_rate": 1.6018726773186525e-05, |
|
"loss": 2.1627, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.24119548499584198, |
|
"learning_rate": 1.578954797461879e-05, |
|
"loss": 2.3492, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.21263697743415833, |
|
"learning_rate": 1.5561966963229924e-05, |
|
"loss": 2.3261, |
|
"step": 5335 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.22236277163028717, |
|
"learning_rate": 1.5335985291580785e-05, |
|
"loss": 2.2343, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.21450620889663696, |
|
"learning_rate": 1.5111604501321875e-05, |
|
"loss": 2.2677, |
|
"step": 5345 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.2557521462440491, |
|
"learning_rate": 1.4888826123182153e-05, |
|
"loss": 2.2499, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.22849878668785095, |
|
"learning_rate": 1.4667651676959077e-05, |
|
"loss": 2.1613, |
|
"step": 5355 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.24565404653549194, |
|
"learning_rate": 1.4448082671507989e-05, |
|
"loss": 2.2011, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.26400044560432434, |
|
"learning_rate": 1.4230120604731889e-05, |
|
"loss": 2.5032, |
|
"step": 5365 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.2253475785255432, |
|
"learning_rate": 1.4013766963571262e-05, |
|
"loss": 2.2235, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.2189287692308426, |
|
"learning_rate": 1.3799023223993817e-05, |
|
"loss": 2.3313, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.23772679269313812, |
|
"learning_rate": 1.3585890850984579e-05, |
|
"loss": 2.4549, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.23463352024555206, |
|
"learning_rate": 1.337437129853572e-05, |
|
"loss": 2.1687, |
|
"step": 5385 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.22411875426769257, |
|
"learning_rate": 1.3164466009636778e-05, |
|
"loss": 2.3785, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.23704136908054352, |
|
"learning_rate": 1.295617641626476e-05, |
|
"loss": 2.3108, |
|
"step": 5395 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.19436527788639069, |
|
"learning_rate": 1.2749503939374379e-05, |
|
"loss": 2.1299, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.19581307470798492, |
|
"learning_rate": 1.2544449988888235e-05, |
|
"loss": 2.2972, |
|
"step": 5405 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.2575695216655731, |
|
"learning_rate": 1.2341015963687557e-05, |
|
"loss": 2.236, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.2203637808561325, |
|
"learning_rate": 1.213920325160217e-05, |
|
"loss": 2.0656, |
|
"step": 5415 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.19933323562145233, |
|
"learning_rate": 1.1939013229401524e-05, |
|
"loss": 2.232, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.21780821681022644, |
|
"learning_rate": 1.1740447262784781e-05, |
|
"loss": 2.3308, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.2099289745092392, |
|
"learning_rate": 1.1543506706371998e-05, |
|
"loss": 2.3933, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.262759268283844, |
|
"learning_rate": 1.1348192903694543e-05, |
|
"loss": 2.1099, |
|
"step": 5435 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.28049829602241516, |
|
"learning_rate": 1.1154507187186052e-05, |
|
"loss": 2.3418, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.21992290019989014, |
|
"learning_rate": 1.0962450878173408e-05, |
|
"loss": 2.1937, |
|
"step": 5445 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.21217088401317596, |
|
"learning_rate": 1.0772025286867577e-05, |
|
"loss": 2.2756, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.2052336037158966, |
|
"learning_rate": 1.0583231712354756e-05, |
|
"loss": 2.1314, |
|
"step": 5455 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.2902153432369232, |
|
"learning_rate": 1.039607144258753e-05, |
|
"loss": 2.0119, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.28160467743873596, |
|
"learning_rate": 1.0210545754376021e-05, |
|
"loss": 1.9997, |
|
"step": 5465 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.2551237642765045, |
|
"learning_rate": 1.0026655913379229e-05, |
|
"loss": 2.5376, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.21193884313106537, |
|
"learning_rate": 9.844403174096433e-06, |
|
"loss": 2.1938, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.21645359694957733, |
|
"learning_rate": 9.663788779858408e-06, |
|
"loss": 2.1168, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.19699765741825104, |
|
"learning_rate": 9.484813962819361e-06, |
|
"loss": 2.1105, |
|
"step": 5485 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.21276994049549103, |
|
"learning_rate": 9.307479943948039e-06, |
|
"loss": 2.2799, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.22967317700386047, |
|
"learning_rate": 9.131787933019831e-06, |
|
"loss": 2.5073, |
|
"step": 5495 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.21648356318473816, |
|
"learning_rate": 8.9577391286082e-06, |
|
"loss": 2.3877, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.23259419202804565, |
|
"learning_rate": 8.78533471807666e-06, |
|
"loss": 2.4268, |
|
"step": 5505 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.22746941447257996, |
|
"learning_rate": 8.614575877570696e-06, |
|
"loss": 2.212, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.3145082890987396, |
|
"learning_rate": 8.445463772009638e-06, |
|
"loss": 2.1488, |
|
"step": 5515 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.23564931750297546, |
|
"learning_rate": 8.27799955507877e-06, |
|
"loss": 2.2061, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.22794529795646667, |
|
"learning_rate": 8.112184369221482e-06, |
|
"loss": 2.2778, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.21888861060142517, |
|
"learning_rate": 7.948019345631385e-06, |
|
"loss": 2.1319, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.2596055567264557, |
|
"learning_rate": 7.78550560424479e-06, |
|
"loss": 2.363, |
|
"step": 5535 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.21776314079761505, |
|
"learning_rate": 7.624644253732799e-06, |
|
"loss": 2.1601, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.23371025919914246, |
|
"learning_rate": 7.465436391493885e-06, |
|
"loss": 2.2857, |
|
"step": 5545 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.23128168284893036, |
|
"learning_rate": 7.3078831036465785e-06, |
|
"loss": 2.2666, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.20787528157234192, |
|
"learning_rate": 7.151985465021604e-06, |
|
"loss": 2.5238, |
|
"step": 5555 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.19910071790218353, |
|
"learning_rate": 6.997744539155026e-06, |
|
"loss": 2.2942, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.19725759327411652, |
|
"learning_rate": 6.845161378280784e-06, |
|
"loss": 2.3778, |
|
"step": 5565 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.20873206853866577, |
|
"learning_rate": 6.694237023323335e-06, |
|
"loss": 2.2222, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.2186024934053421, |
|
"learning_rate": 6.544972503891023e-06, |
|
"loss": 2.2635, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.21914984285831451, |
|
"learning_rate": 6.3973688382684965e-06, |
|
"loss": 1.8, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.2277553379535675, |
|
"learning_rate": 6.251427033410195e-06, |
|
"loss": 1.8708, |
|
"step": 5585 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.2023899406194687, |
|
"learning_rate": 6.1071480849332896e-06, |
|
"loss": 2.3289, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.1917661875486374, |
|
"learning_rate": 5.964532977110887e-06, |
|
"loss": 2.175, |
|
"step": 5595 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.3647346496582031, |
|
"learning_rate": 5.82358268286537e-06, |
|
"loss": 2.2159, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.2529740333557129, |
|
"learning_rate": 5.684298163761759e-06, |
|
"loss": 2.3442, |
|
"step": 5605 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.3258639872074127, |
|
"learning_rate": 5.546680370001028e-06, |
|
"loss": 2.1641, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.21551235020160675, |
|
"learning_rate": 5.410730240413942e-06, |
|
"loss": 2.3734, |
|
"step": 5615 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.2223397046327591, |
|
"learning_rate": 5.276448702454195e-06, |
|
"loss": 2.0221, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.2447679489850998, |
|
"learning_rate": 5.143836672192481e-06, |
|
"loss": 2.073, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.20842759311199188, |
|
"learning_rate": 5.012895054310046e-06, |
|
"loss": 2.3238, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.23142164945602417, |
|
"learning_rate": 4.883624742092501e-06, |
|
"loss": 2.2459, |
|
"step": 5635 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.21301431953907013, |
|
"learning_rate": 4.756026617423887e-06, |
|
"loss": 2.3963, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.20507967472076416, |
|
"learning_rate": 4.630101550780447e-06, |
|
"loss": 2.3633, |
|
"step": 5645 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.22849854826927185, |
|
"learning_rate": 4.505850401224921e-06, |
|
"loss": 2.4213, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.21087034046649933, |
|
"learning_rate": 4.3832740164004295e-06, |
|
"loss": 2.3358, |
|
"step": 5655 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.2474902719259262, |
|
"learning_rate": 4.2623732325249295e-06, |
|
"loss": 1.9333, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.22979308664798737, |
|
"learning_rate": 4.143148874385355e-06, |
|
"loss": 2.4708, |
|
"step": 5665 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.24336537718772888, |
|
"learning_rate": 4.025601755332065e-06, |
|
"loss": 2.2467, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.25388413667678833, |
|
"learning_rate": 3.909732677273209e-06, |
|
"loss": 2.0566, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.23465120792388916, |
|
"learning_rate": 3.795542430669374e-06, |
|
"loss": 2.2796, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.23578540980815887, |
|
"learning_rate": 3.6830317945281133e-06, |
|
"loss": 2.2443, |
|
"step": 5685 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.23347768187522888, |
|
"learning_rate": 3.5722015363986725e-06, |
|
"loss": 2.1135, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.23163768649101257, |
|
"learning_rate": 3.4630524123667184e-06, |
|
"loss": 2.5128, |
|
"step": 5695 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.20352327823638916, |
|
"learning_rate": 3.3555851670491467e-06, |
|
"loss": 2.2458, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.20329436659812927, |
|
"learning_rate": 3.24980053358917e-06, |
|
"loss": 2.2312, |
|
"step": 5705 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.2172505259513855, |
|
"learning_rate": 3.1456992336510726e-06, |
|
"loss": 2.3672, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.23608310520648956, |
|
"learning_rate": 3.0432819774154907e-06, |
|
"loss": 2.2544, |
|
"step": 5715 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.23203229904174805, |
|
"learning_rate": 2.942549463574473e-06, |
|
"loss": 2.4567, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.22394001483917236, |
|
"learning_rate": 2.8435023793267068e-06, |
|
"loss": 2.2673, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.2103736698627472, |
|
"learning_rate": 2.7461414003728534e-06, |
|
"loss": 2.347, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.22648335993289948, |
|
"learning_rate": 2.650467190910999e-06, |
|
"loss": 2.2428, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.24892614781856537, |
|
"learning_rate": 2.556480403631989e-06, |
|
"loss": 2.0718, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2146364003419876, |
|
"learning_rate": 2.4641816797151272e-06, |
|
"loss": 2.2748, |
|
"step": 5745 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.24034225940704346, |
|
"learning_rate": 2.373571648823597e-06, |
|
"loss": 2.4486, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.19975844025611877, |
|
"learning_rate": 2.284650929100435e-06, |
|
"loss": 1.9455, |
|
"step": 5755 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2771877646446228, |
|
"learning_rate": 2.197420127164118e-06, |
|
"loss": 2.3526, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.20685018599033356, |
|
"learning_rate": 2.1118798381044023e-06, |
|
"loss": 2.2497, |
|
"step": 5765 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.25831371545791626, |
|
"learning_rate": 2.02803064547849e-06, |
|
"loss": 2.4169, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2329753190279007, |
|
"learning_rate": 1.945873121306757e-06, |
|
"loss": 2.1808, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.1924462765455246, |
|
"learning_rate": 1.8654078260691153e-06, |
|
"loss": 2.074, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.21585677564144135, |
|
"learning_rate": 1.7866353087009345e-06, |
|
"loss": 2.1959, |
|
"step": 5785 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.23273953795433044, |
|
"learning_rate": 1.709556106589516e-06, |
|
"loss": 2.3726, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.23679770529270172, |
|
"learning_rate": 1.6341707455703459e-06, |
|
"loss": 2.0897, |
|
"step": 5795 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2224605232477188, |
|
"learning_rate": 1.5604797399234595e-06, |
|
"loss": 2.4029, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.2761968672275543, |
|
"learning_rate": 1.488483592369999e-06, |
|
"loss": 2.1526, |
|
"step": 5805 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.24104470014572144, |
|
"learning_rate": 1.4181827940687453e-06, |
|
"loss": 2.2732, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.19636249542236328, |
|
"learning_rate": 1.3495778246127576e-06, |
|
"loss": 2.1891, |
|
"step": 5815 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.23419730365276337, |
|
"learning_rate": 1.2826691520262114e-06, |
|
"loss": 2.4011, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.21348519623279572, |
|
"learning_rate": 1.2174572327610378e-06, |
|
"loss": 2.3018, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.24424676597118378, |
|
"learning_rate": 1.1539425116939006e-06, |
|
"loss": 2.4398, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.3099956810474396, |
|
"learning_rate": 1.092125422123197e-06, |
|
"loss": 2.2096, |
|
"step": 5835 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.20208124816417694, |
|
"learning_rate": 1.0320063857660045e-06, |
|
"loss": 2.235, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.22363179922103882, |
|
"learning_rate": 9.735858127553066e-07, |
|
"loss": 2.3201, |
|
"step": 5845 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.22977089881896973, |
|
"learning_rate": 9.168641016371049e-07, |
|
"loss": 2.2843, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.22061312198638916, |
|
"learning_rate": 8.618416393677552e-07, |
|
"loss": 2.4087, |
|
"step": 5855 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.22010397911071777, |
|
"learning_rate": 8.085188013113032e-07, |
|
"loss": 2.205, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.22382394969463348, |
|
"learning_rate": 7.568959512369577e-07, |
|
"loss": 2.2887, |
|
"step": 5865 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.21971046924591064, |
|
"learning_rate": 7.069734413165663e-07, |
|
"loss": 2.4646, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.21887187659740448, |
|
"learning_rate": 6.587516121221993e-07, |
|
"loss": 2.3914, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.20574615895748138, |
|
"learning_rate": 6.122307926239579e-07, |
|
"loss": 2.4691, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.21829432249069214, |
|
"learning_rate": 5.674113001875036e-07, |
|
"loss": 2.328, |
|
"step": 5885 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.20351313054561615, |
|
"learning_rate": 5.242934405720879e-07, |
|
"loss": 2.2867, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.22187699377536774, |
|
"learning_rate": 4.828775079284142e-07, |
|
"loss": 2.6343, |
|
"step": 5895 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.25424492359161377, |
|
"learning_rate": 4.431637847965575e-07, |
|
"loss": 2.214, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.2767295241355896, |
|
"learning_rate": 4.0515254210415907e-07, |
|
"loss": 2.1654, |
|
"step": 5905 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.2616841495037079, |
|
"learning_rate": 3.6884403916445653e-07, |
|
"loss": 2.1905, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.24903003871440887, |
|
"learning_rate": 3.3423852367461817e-07, |
|
"loss": 2.3208, |
|
"step": 5915 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.21697789430618286, |
|
"learning_rate": 3.0133623171396673e-07, |
|
"loss": 2.1046, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.24143174290657043, |
|
"learning_rate": 2.701373877424251e-07, |
|
"loss": 2.2847, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.19240717589855194, |
|
"learning_rate": 2.40642204598962e-07, |
|
"loss": 2.371, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.22101587057113647, |
|
"learning_rate": 2.1285088350017635e-07, |
|
"loss": 2.1873, |
|
"step": 5935 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.30621862411499023, |
|
"learning_rate": 1.86763614038854e-07, |
|
"loss": 2.4638, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.20043669641017914, |
|
"learning_rate": 1.6238057418274666e-07, |
|
"loss": 2.1642, |
|
"step": 5945 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2317458689212799, |
|
"learning_rate": 1.3970193027332268e-07, |
|
"loss": 2.4367, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2099936455488205, |
|
"learning_rate": 1.1872783702460144e-07, |
|
"loss": 2.3994, |
|
"step": 5955 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.197767436504364, |
|
"learning_rate": 9.945843752218186e-08, |
|
"loss": 2.1562, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.2169959545135498, |
|
"learning_rate": 8.189386322218773e-08, |
|
"loss": 2.2637, |
|
"step": 5965 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.28220120072364807, |
|
"learning_rate": 6.60342339504072e-08, |
|
"loss": 2.2485, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.25559163093566895, |
|
"learning_rate": 5.187965790143245e-08, |
|
"loss": 2.3216, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.21520258486270905, |
|
"learning_rate": 3.943023163804904e-08, |
|
"loss": 2.3339, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.23260967433452606, |
|
"learning_rate": 2.8686040090375453e-08, |
|
"loss": 2.3379, |
|
"step": 5985 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.24584035575389862, |
|
"learning_rate": 1.9647156555474556e-08, |
|
"loss": 2.3093, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.23227186501026154, |
|
"learning_rate": 1.2313642696742955e-08, |
|
"loss": 2.3064, |
|
"step": 5995 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.24923665821552277, |
|
"learning_rate": 6.685548543522435e-09, |
|
"loss": 2.5481, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2306099534034729, |
|
"learning_rate": 2.7629124907391223e-09, |
|
"loss": 2.3828, |
|
"step": 6005 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2319669872522354, |
|
"learning_rate": 5.457612986814464e-10, |
|
"loss": 2.4561, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 2.2096378803253174, |
|
"eval_runtime": 372.0471, |
|
"eval_samples_per_second": 3.22, |
|
"eval_steps_per_second": 3.22, |
|
"step": 6014 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 6014, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"total_flos": 6296538124910592.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|