{
  "best_metric": 0.9951696391786315,
  "best_model_checkpoint": "swin-base-patch4-window7-224-finetuned-eurosat/checkpoint-2490",
  "epoch": 2.9987959060806744,
  "eval_steps": 500,
  "global_step": 2490,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012040939193257074,
      "grad_norm": 37.881980895996094,
      "learning_rate": 2.0080321285140564e-06,
      "loss": 1.3851,
      "step": 10
    },
    {
      "epoch": 0.024081878386514148,
      "grad_norm": 27.445205688476562,
      "learning_rate": 4.016064257028113e-06,
      "loss": 1.1863,
      "step": 20
    },
    {
      "epoch": 0.036122817579771226,
      "grad_norm": 26.590789794921875,
      "learning_rate": 6.024096385542169e-06,
      "loss": 0.9271,
      "step": 30
    },
    {
      "epoch": 0.048163756773028296,
      "grad_norm": 24.892778396606445,
      "learning_rate": 8.032128514056226e-06,
      "loss": 0.5864,
      "step": 40
    },
    {
      "epoch": 0.060204695966285374,
      "grad_norm": 28.44145393371582,
      "learning_rate": 1.0040160642570281e-05,
      "loss": 0.3409,
      "step": 50
    },
    {
      "epoch": 0.07224563515954245,
      "grad_norm": 53.996341705322266,
      "learning_rate": 1.2048192771084338e-05,
      "loss": 0.2325,
      "step": 60
    },
    {
      "epoch": 0.08428657435279951,
      "grad_norm": 52.281253814697266,
      "learning_rate": 1.4056224899598394e-05,
      "loss": 0.1837,
      "step": 70
    },
    {
      "epoch": 0.09632751354605659,
      "grad_norm": 33.00680160522461,
      "learning_rate": 1.606425702811245e-05,
      "loss": 0.1589,
      "step": 80
    },
    {
      "epoch": 0.10836845273931367,
      "grad_norm": 48.353519439697266,
      "learning_rate": 1.8072289156626505e-05,
      "loss": 0.1678,
      "step": 90
    },
    {
      "epoch": 0.12040939193257075,
      "grad_norm": 40.386356353759766,
      "learning_rate": 2.0080321285140562e-05,
      "loss": 0.1699,
      "step": 100
    },
    {
      "epoch": 0.13245033112582782,
      "grad_norm": 57.39128112792969,
      "learning_rate": 2.208835341365462e-05,
      "loss": 0.1331,
      "step": 110
    },
    {
      "epoch": 0.1444912703190849,
      "grad_norm": 32.292842864990234,
      "learning_rate": 2.4096385542168677e-05,
      "loss": 0.0939,
      "step": 120
    },
    {
      "epoch": 0.15653220951234195,
      "grad_norm": 57.7134895324707,
      "learning_rate": 2.6104417670682734e-05,
      "loss": 0.1299,
      "step": 130
    },
    {
      "epoch": 0.16857314870559903,
      "grad_norm": 19.494935989379883,
      "learning_rate": 2.8112449799196788e-05,
      "loss": 0.1485,
      "step": 140
    },
    {
      "epoch": 0.1806140878988561,
      "grad_norm": 34.56547164916992,
      "learning_rate": 3.012048192771085e-05,
      "loss": 0.102,
      "step": 150
    },
    {
      "epoch": 0.19265502709211318,
      "grad_norm": 53.24101638793945,
      "learning_rate": 3.21285140562249e-05,
      "loss": 0.0968,
      "step": 160
    },
    {
      "epoch": 0.20469596628537026,
      "grad_norm": 20.195816040039062,
      "learning_rate": 3.413654618473896e-05,
      "loss": 0.1091,
      "step": 170
    },
    {
      "epoch": 0.21673690547862734,
      "grad_norm": 38.21851348876953,
      "learning_rate": 3.614457831325301e-05,
      "loss": 0.0995,
      "step": 180
    },
    {
      "epoch": 0.22877784467188442,
      "grad_norm": 78.3773193359375,
      "learning_rate": 3.815261044176707e-05,
      "loss": 0.0987,
      "step": 190
    },
    {
      "epoch": 0.2408187838651415,
      "grad_norm": 19.694732666015625,
      "learning_rate": 4.0160642570281125e-05,
      "loss": 0.0757,
      "step": 200
    },
    {
      "epoch": 0.25285972305839854,
      "grad_norm": 22.578950881958008,
      "learning_rate": 4.2168674698795186e-05,
      "loss": 0.0906,
      "step": 210
    },
    {
      "epoch": 0.26490066225165565,
      "grad_norm": 16.256385803222656,
      "learning_rate": 4.417670682730924e-05,
      "loss": 0.072,
      "step": 220
    },
    {
      "epoch": 0.2769416014449127,
      "grad_norm": 19.03795623779297,
      "learning_rate": 4.61847389558233e-05,
      "loss": 0.1194,
      "step": 230
    },
    {
      "epoch": 0.2889825406381698,
      "grad_norm": 28.470212936401367,
      "learning_rate": 4.8192771084337354e-05,
      "loss": 0.1137,
      "step": 240
    },
    {
      "epoch": 0.30102347983142685,
      "grad_norm": 26.385528564453125,
      "learning_rate": 4.9977688531905406e-05,
      "loss": 0.0885,
      "step": 250
    },
    {
      "epoch": 0.3130644190246839,
      "grad_norm": 13.934977531433105,
      "learning_rate": 4.97545738509594e-05,
      "loss": 0.0767,
      "step": 260
    },
    {
      "epoch": 0.325105358217941,
      "grad_norm": 13.682855606079102,
      "learning_rate": 4.953145917001339e-05,
      "loss": 0.0935,
      "step": 270
    },
    {
      "epoch": 0.33714629741119806,
      "grad_norm": 12.338207244873047,
      "learning_rate": 4.930834448906738e-05,
      "loss": 0.0621,
      "step": 280
    },
    {
      "epoch": 0.34918723660445516,
      "grad_norm": 9.674701690673828,
      "learning_rate": 4.908522980812137e-05,
      "loss": 0.1039,
      "step": 290
    },
    {
      "epoch": 0.3612281757977122,
      "grad_norm": 16.479585647583008,
      "learning_rate": 4.886211512717537e-05,
      "loss": 0.0928,
      "step": 300
    },
    {
      "epoch": 0.3732691149909693,
      "grad_norm": 25.431562423706055,
      "learning_rate": 4.8639000446229364e-05,
      "loss": 0.0737,
      "step": 310
    },
    {
      "epoch": 0.38531005418422637,
      "grad_norm": 6.355775833129883,
      "learning_rate": 4.8415885765283355e-05,
      "loss": 0.081,
      "step": 320
    },
    {
      "epoch": 0.3973509933774834,
      "grad_norm": 13.13481616973877,
      "learning_rate": 4.8192771084337354e-05,
      "loss": 0.0968,
      "step": 330
    },
    {
      "epoch": 0.4093919325707405,
      "grad_norm": 5.214439868927002,
      "learning_rate": 4.7969656403391346e-05,
      "loss": 0.0734,
      "step": 340
    },
    {
      "epoch": 0.4214328717639976,
      "grad_norm": 17.911102294921875,
      "learning_rate": 4.774654172244534e-05,
      "loss": 0.0323,
      "step": 350
    },
    {
      "epoch": 0.4334738109572547,
      "grad_norm": 20.677658081054688,
      "learning_rate": 4.7523427041499336e-05,
      "loss": 0.0442,
      "step": 360
    },
    {
      "epoch": 0.44551475015051173,
      "grad_norm": 21.959959030151367,
      "learning_rate": 4.730031236055333e-05,
      "loss": 0.1071,
      "step": 370
    },
    {
      "epoch": 0.45755568934376883,
      "grad_norm": 11.989217758178711,
      "learning_rate": 4.707719767960732e-05,
      "loss": 0.1196,
      "step": 380
    },
    {
      "epoch": 0.4695966285370259,
      "grad_norm": 18.601367950439453,
      "learning_rate": 4.685408299866131e-05,
      "loss": 0.084,
      "step": 390
    },
    {
      "epoch": 0.481637567730283,
      "grad_norm": 26.319194793701172,
      "learning_rate": 4.663096831771531e-05,
      "loss": 0.1009,
      "step": 400
    },
    {
      "epoch": 0.49367850692354004,
      "grad_norm": 16.73026466369629,
      "learning_rate": 4.64078536367693e-05,
      "loss": 0.065,
      "step": 410
    },
    {
      "epoch": 0.5057194461167971,
      "grad_norm": 6.364532947540283,
      "learning_rate": 4.61847389558233e-05,
      "loss": 0.0411,
      "step": 420
    },
    {
      "epoch": 0.5177603853100542,
      "grad_norm": 15.919637680053711,
      "learning_rate": 4.596162427487729e-05,
      "loss": 0.0826,
      "step": 430
    },
    {
      "epoch": 0.5298013245033113,
      "grad_norm": 9.566567420959473,
      "learning_rate": 4.5738509593931284e-05,
      "loss": 0.0626,
      "step": 440
    },
    {
      "epoch": 0.5418422636965683,
      "grad_norm": 15.061929702758789,
      "learning_rate": 4.5515394912985275e-05,
      "loss": 0.0628,
      "step": 450
    },
    {
      "epoch": 0.5538832028898254,
      "grad_norm": 9.935952186584473,
      "learning_rate": 4.529228023203927e-05,
      "loss": 0.0522,
      "step": 460
    },
    {
      "epoch": 0.5659241420830825,
      "grad_norm": 25.847078323364258,
      "learning_rate": 4.506916555109326e-05,
      "loss": 0.0542,
      "step": 470
    },
    {
      "epoch": 0.5779650812763396,
      "grad_norm": 26.931169509887695,
      "learning_rate": 4.484605087014726e-05,
      "loss": 0.0678,
      "step": 480
    },
    {
      "epoch": 0.5900060204695966,
      "grad_norm": 16.654766082763672,
      "learning_rate": 4.4622936189201256e-05,
      "loss": 0.056,
      "step": 490
    },
    {
      "epoch": 0.6020469596628537,
      "grad_norm": 28.53946304321289,
      "learning_rate": 4.439982150825525e-05,
      "loss": 0.0477,
      "step": 500
    },
    {
      "epoch": 0.6140878988561108,
      "grad_norm": 27.371204376220703,
      "learning_rate": 4.417670682730924e-05,
      "loss": 0.0627,
      "step": 510
    },
    {
      "epoch": 0.6261288380493678,
      "grad_norm": 10.891378402709961,
      "learning_rate": 4.395359214636323e-05,
      "loss": 0.0435,
      "step": 520
    },
    {
      "epoch": 0.6381697772426249,
      "grad_norm": 16.968677520751953,
      "learning_rate": 4.373047746541722e-05,
      "loss": 0.0418,
      "step": 530
    },
    {
      "epoch": 0.650210716435882,
      "grad_norm": 7.117012023925781,
      "learning_rate": 4.350736278447122e-05,
      "loss": 0.035,
      "step": 540
    },
    {
      "epoch": 0.6622516556291391,
      "grad_norm": 42.22456741333008,
      "learning_rate": 4.328424810352521e-05,
      "loss": 0.0332,
      "step": 550
    },
    {
      "epoch": 0.6742925948223961,
      "grad_norm": 8.816338539123535,
      "learning_rate": 4.306113342257921e-05,
      "loss": 0.0632,
      "step": 560
    },
    {
      "epoch": 0.6863335340156532,
      "grad_norm": 4.652446269989014,
      "learning_rate": 4.2838018741633203e-05,
      "loss": 0.059,
      "step": 570
    },
    {
      "epoch": 0.6983744732089103,
      "grad_norm": 14.168267250061035,
      "learning_rate": 4.2614904060687195e-05,
      "loss": 0.0457,
      "step": 580
    },
    {
      "epoch": 0.7104154124021673,
      "grad_norm": 21.56100845336914,
      "learning_rate": 4.239178937974119e-05,
      "loss": 0.0339,
      "step": 590
    },
    {
      "epoch": 0.7224563515954244,
      "grad_norm": 30.049415588378906,
      "learning_rate": 4.2168674698795186e-05,
      "loss": 0.041,
      "step": 600
    },
    {
      "epoch": 0.7344972907886815,
      "grad_norm": 16.741825103759766,
      "learning_rate": 4.194556001784918e-05,
      "loss": 0.0468,
      "step": 610
    },
    {
      "epoch": 0.7465382299819386,
      "grad_norm": 14.885356903076172,
      "learning_rate": 4.172244533690317e-05,
      "loss": 0.0274,
      "step": 620
    },
    {
      "epoch": 0.7585791691751956,
      "grad_norm": 14.38347339630127,
      "learning_rate": 4.149933065595716e-05,
      "loss": 0.0429,
      "step": 630
    },
    {
      "epoch": 0.7706201083684527,
      "grad_norm": 17.572885513305664,
      "learning_rate": 4.127621597501116e-05,
      "loss": 0.0382,
      "step": 640
    },
    {
      "epoch": 0.7826610475617098,
      "grad_norm": 6.489743709564209,
      "learning_rate": 4.105310129406515e-05,
      "loss": 0.0268,
      "step": 650
    },
    {
      "epoch": 0.7947019867549668,
      "grad_norm": 26.665414810180664,
      "learning_rate": 4.082998661311915e-05,
      "loss": 0.0625,
      "step": 660
    },
    {
      "epoch": 0.8067429259482239,
      "grad_norm": 18.689409255981445,
      "learning_rate": 4.060687193217314e-05,
      "loss": 0.0445,
      "step": 670
    },
    {
      "epoch": 0.818783865141481,
      "grad_norm": 9.611627578735352,
      "learning_rate": 4.038375725122713e-05,
      "loss": 0.036,
      "step": 680
    },
    {
      "epoch": 0.8308248043347382,
      "grad_norm": 23.417274475097656,
      "learning_rate": 4.0160642570281125e-05,
      "loss": 0.0338,
      "step": 690
    },
    {
      "epoch": 0.8428657435279951,
      "grad_norm": 10.954760551452637,
      "learning_rate": 3.993752788933512e-05,
      "loss": 0.0369,
      "step": 700
    },
    {
      "epoch": 0.8549066827212523,
      "grad_norm": 23.96639633178711,
      "learning_rate": 3.9714413208389115e-05,
      "loss": 0.0531,
      "step": 710
    },
    {
      "epoch": 0.8669476219145094,
      "grad_norm": 8.682661056518555,
      "learning_rate": 3.949129852744311e-05,
      "loss": 0.0497,
      "step": 720
    },
    {
      "epoch": 0.8789885611077664,
      "grad_norm": 4.494186878204346,
      "learning_rate": 3.9268183846497105e-05,
      "loss": 0.0285,
      "step": 730
    },
    {
      "epoch": 0.8910295003010235,
      "grad_norm": 15.755534172058105,
      "learning_rate": 3.90450691655511e-05,
      "loss": 0.0322,
      "step": 740
    },
    {
      "epoch": 0.9030704394942806,
      "grad_norm": 7.190816879272461,
      "learning_rate": 3.882195448460509e-05,
      "loss": 0.0301,
      "step": 750
    },
    {
      "epoch": 0.9151113786875377,
      "grad_norm": 0.7052440047264099,
      "learning_rate": 3.859883980365908e-05,
      "loss": 0.0191,
      "step": 760
    },
    {
      "epoch": 0.9271523178807947,
      "grad_norm": 10.254659652709961,
      "learning_rate": 3.837572512271307e-05,
      "loss": 0.0333,
      "step": 770
    },
    {
      "epoch": 0.9391932570740518,
      "grad_norm": 16.437780380249023,
      "learning_rate": 3.815261044176707e-05,
      "loss": 0.0324,
      "step": 780
    },
    {
      "epoch": 0.9512341962673089,
      "grad_norm": 17.565311431884766,
      "learning_rate": 3.792949576082106e-05,
      "loss": 0.0318,
      "step": 790
    },
    {
      "epoch": 0.963275135460566,
      "grad_norm": 9.585817337036133,
      "learning_rate": 3.770638107987506e-05,
      "loss": 0.0322,
      "step": 800
    },
    {
      "epoch": 0.975316074653823,
      "grad_norm": 20.597179412841797,
      "learning_rate": 3.748326639892905e-05,
      "loss": 0.0342,
      "step": 810
    },
    {
      "epoch": 0.9873570138470801,
      "grad_norm": 26.928421020507812,
      "learning_rate": 3.7260151717983045e-05,
      "loss": 0.0388,
      "step": 820
    },
    {
      "epoch": 0.9993979530403372,
      "grad_norm": 23.810352325439453,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.0376,
      "step": 830
    },
    {
      "epoch": 0.9993979530403372,
      "eval_f1": 0.9759432907017198,
      "eval_loss": 0.022286901250481606,
      "eval_runtime": 124.0129,
      "eval_samples_per_second": 95.232,
      "eval_steps_per_second": 2.984,
      "step": 830
    },
    {
      "epoch": 1.0117399157134257,
      "grad_norm": 10.749777793884277,
      "learning_rate": 3.6813922356091035e-05,
      "loss": 0.0336,
      "step": 840
    },
    {
      "epoch": 1.0237808549066827,
      "grad_norm": 9.403740882873535,
      "learning_rate": 3.659080767514503e-05,
      "loss": 0.0179,
      "step": 850
    },
    {
      "epoch": 1.03582179409994,
      "grad_norm": 3.2515432834625244,
      "learning_rate": 3.636769299419902e-05,
      "loss": 0.0318,
      "step": 860
    },
    {
      "epoch": 1.047862733293197,
      "grad_norm": 59.94658279418945,
      "learning_rate": 3.614457831325301e-05,
      "loss": 0.0305,
      "step": 870
    },
    {
      "epoch": 1.059903672486454,
      "grad_norm": 29.028059005737305,
      "learning_rate": 3.592146363230701e-05,
      "loss": 0.0345,
      "step": 880
    },
    {
      "epoch": 1.0719446116797111,
      "grad_norm": 19.99188232421875,
      "learning_rate": 3.5698348951361e-05,
      "loss": 0.0305,
      "step": 890
    },
    {
      "epoch": 1.083985550872968,
      "grad_norm": 10.841901779174805,
      "learning_rate": 3.5475234270415e-05,
      "loss": 0.0283,
      "step": 900
    },
    {
      "epoch": 1.096026490066225,
      "grad_norm": 23.107820510864258,
      "learning_rate": 3.525211958946899e-05,
      "loss": 0.0195,
      "step": 910
    },
    {
      "epoch": 1.1080674292594823,
      "grad_norm": 25.3140811920166,
      "learning_rate": 3.502900490852298e-05,
      "loss": 0.0303,
      "step": 920
    },
    {
      "epoch": 1.1201083684527393,
      "grad_norm": 24.250219345092773,
      "learning_rate": 3.4805890227576974e-05,
      "loss": 0.0426,
      "step": 930
    },
    {
      "epoch": 1.1321493076459963,
      "grad_norm": 6.2568793296813965,
      "learning_rate": 3.4582775546630966e-05,
      "loss": 0.0289,
      "step": 940
    },
    {
      "epoch": 1.1441902468392535,
      "grad_norm": 6.120177268981934,
      "learning_rate": 3.4359660865684965e-05,
      "loss": 0.0469,
      "step": 950
    },
    {
      "epoch": 1.1562311860325105,
      "grad_norm": 31.236495971679688,
      "learning_rate": 3.413654618473896e-05,
      "loss": 0.0297,
      "step": 960
    },
    {
      "epoch": 1.1682721252257675,
      "grad_norm": 9.599982261657715,
      "learning_rate": 3.3913431503792955e-05,
      "loss": 0.0217,
      "step": 970
    },
    {
      "epoch": 1.1803130644190247,
      "grad_norm": 20.92270278930664,
      "learning_rate": 3.369031682284695e-05,
      "loss": 0.0325,
      "step": 980
    },
    {
      "epoch": 1.1923540036122817,
      "grad_norm": 10.290868759155273,
      "learning_rate": 3.346720214190094e-05,
      "loss": 0.0372,
      "step": 990
    },
    {
      "epoch": 1.2043949428055387,
      "grad_norm": 17.62599754333496,
      "learning_rate": 3.324408746095493e-05,
      "loss": 0.0305,
      "step": 1000
    },
    {
      "epoch": 1.216435881998796,
      "grad_norm": 6.780280590057373,
      "learning_rate": 3.302097278000892e-05,
      "loss": 0.0172,
      "step": 1010
    },
    {
      "epoch": 1.228476821192053,
      "grad_norm": 16.38884735107422,
      "learning_rate": 3.279785809906292e-05,
      "loss": 0.0249,
      "step": 1020
    },
    {
      "epoch": 1.2405177603853101,
      "grad_norm": 16.210906982421875,
      "learning_rate": 3.257474341811691e-05,
      "loss": 0.0436,
      "step": 1030
    },
    {
      "epoch": 1.2525586995785671,
      "grad_norm": 12.718667984008789,
      "learning_rate": 3.235162873717091e-05,
      "loss": 0.017,
      "step": 1040
    },
    {
      "epoch": 1.2645996387718241,
      "grad_norm": 0.5069631338119507,
      "learning_rate": 3.21285140562249e-05,
      "loss": 0.0358,
      "step": 1050
    },
    {
      "epoch": 1.2766405779650813,
      "grad_norm": 8.307098388671875,
      "learning_rate": 3.1905399375278894e-05,
      "loss": 0.011,
      "step": 1060
    },
    {
      "epoch": 1.2886815171583383,
      "grad_norm": 7.443680763244629,
      "learning_rate": 3.1682284694332886e-05,
      "loss": 0.0254,
      "step": 1070
    },
    {
      "epoch": 1.3007224563515956,
      "grad_norm": 13.364812850952148,
      "learning_rate": 3.1459170013386885e-05,
      "loss": 0.0164,
      "step": 1080
    },
    {
      "epoch": 1.3127633955448526,
      "grad_norm": 17.30490493774414,
      "learning_rate": 3.1236055332440876e-05,
      "loss": 0.0293,
      "step": 1090
    },
    {
      "epoch": 1.3248043347381095,
      "grad_norm": 3.6283154487609863,
      "learning_rate": 3.101294065149487e-05,
      "loss": 0.019,
      "step": 1100
    },
    {
      "epoch": 1.3368452739313668,
      "grad_norm": 24.23577308654785,
      "learning_rate": 3.078982597054887e-05,
      "loss": 0.0136,
      "step": 1110
    },
    {
      "epoch": 1.3488862131246238,
      "grad_norm": 0.5705559849739075,
      "learning_rate": 3.056671128960286e-05,
      "loss": 0.0282,
      "step": 1120
    },
    {
      "epoch": 1.3609271523178808,
      "grad_norm": 25.361787796020508,
      "learning_rate": 3.034359660865685e-05,
      "loss": 0.0272,
      "step": 1130
    },
    {
      "epoch": 1.372968091511138,
      "grad_norm": 8.514543533325195,
      "learning_rate": 3.012048192771085e-05,
      "loss": 0.0214,
      "step": 1140
    },
    {
      "epoch": 1.385009030704395,
      "grad_norm": 25.069978713989258,
      "learning_rate": 2.989736724676484e-05,
      "loss": 0.0237,
      "step": 1150
    },
    {
      "epoch": 1.397049969897652,
      "grad_norm": 33.56332015991211,
      "learning_rate": 2.9674252565818832e-05,
      "loss": 0.0139,
      "step": 1160
    },
    {
      "epoch": 1.4090909090909092,
      "grad_norm": 6.231565952301025,
      "learning_rate": 2.9451137884872827e-05,
      "loss": 0.0279,
      "step": 1170
    },
    {
      "epoch": 1.4211318482841662,
      "grad_norm": 1.0254443883895874,
      "learning_rate": 2.922802320392682e-05,
      "loss": 0.0242,
      "step": 1180
    },
    {
      "epoch": 1.4331727874774232,
      "grad_norm": 2.424696445465088,
      "learning_rate": 2.900490852298081e-05,
      "loss": 0.0215,
      "step": 1190
    },
    {
      "epoch": 1.4452137266706804,
      "grad_norm": 27.256423950195312,
      "learning_rate": 2.878179384203481e-05,
      "loss": 0.0501,
      "step": 1200
    },
    {
      "epoch": 1.4572546658639374,
      "grad_norm": 10.172979354858398,
      "learning_rate": 2.85586791610888e-05,
      "loss": 0.0362,
      "step": 1210
    },
    {
      "epoch": 1.4692956050571944,
      "grad_norm": 9.41952896118164,
      "learning_rate": 2.8335564480142796e-05,
      "loss": 0.027,
      "step": 1220
    },
    {
      "epoch": 1.4813365442504516,
      "grad_norm": 35.477848052978516,
      "learning_rate": 2.8112449799196788e-05,
      "loss": 0.0118,
      "step": 1230
    },
    {
      "epoch": 1.4933774834437086,
      "grad_norm": 0.077400341629982,
      "learning_rate": 2.788933511825078e-05,
      "loss": 0.0283,
      "step": 1240
    },
    {
      "epoch": 1.5054184226369656,
      "grad_norm": 10.312284469604492,
      "learning_rate": 2.7666220437304775e-05,
      "loss": 0.0248,
      "step": 1250
    },
    {
      "epoch": 1.5174593618302228,
      "grad_norm": 33.70686340332031,
      "learning_rate": 2.7443105756358774e-05,
      "loss": 0.0073,
      "step": 1260
    },
    {
      "epoch": 1.5295003010234798,
      "grad_norm": 1.402552604675293,
      "learning_rate": 2.7219991075412765e-05,
      "loss": 0.0515,
      "step": 1270
    },
    {
      "epoch": 1.5415412402167368,
      "grad_norm": 32.386474609375,
      "learning_rate": 2.6996876394466757e-05,
      "loss": 0.0142,
      "step": 1280
    },
    {
      "epoch": 1.553582179409994,
      "grad_norm": 17.349578857421875,
      "learning_rate": 2.6773761713520752e-05,
      "loss": 0.019,
      "step": 1290
    },
    {
      "epoch": 1.5656231186032512,
      "grad_norm": 7.253089904785156,
      "learning_rate": 2.6550647032574744e-05,
      "loss": 0.0201,
      "step": 1300
    },
    {
      "epoch": 1.577664057796508,
      "grad_norm": 0.33720967173576355,
      "learning_rate": 2.6327532351628736e-05,
      "loss": 0.0106,
      "step": 1310
    },
    {
      "epoch": 1.5897049969897652,
      "grad_norm": 33.96757125854492,
      "learning_rate": 2.6104417670682734e-05,
      "loss": 0.0357,
      "step": 1320
    },
    {
      "epoch": 1.6017459361830224,
      "grad_norm": 14.57841682434082,
      "learning_rate": 2.5881302989736726e-05,
      "loss": 0.0138,
      "step": 1330
    },
    {
      "epoch": 1.6137868753762792,
      "grad_norm": 14.796563148498535,
      "learning_rate": 2.565818830879072e-05,
      "loss": 0.011,
      "step": 1340
    },
    {
      "epoch": 1.6258278145695364,
      "grad_norm": 25.40349578857422,
      "learning_rate": 2.5435073627844713e-05,
      "loss": 0.0222,
      "step": 1350
    },
    {
      "epoch": 1.6378687537627936,
      "grad_norm": 15.135784149169922,
      "learning_rate": 2.5211958946898705e-05,
      "loss": 0.0238,
      "step": 1360
    },
    {
      "epoch": 1.6499096929560506,
      "grad_norm": 14.570870399475098,
      "learning_rate": 2.4988844265952703e-05,
      "loss": 0.0236,
      "step": 1370
    },
    {
      "epoch": 1.6619506321493076,
      "grad_norm": 15.314672470092773,
      "learning_rate": 2.4765729585006695e-05,
      "loss": 0.0265,
      "step": 1380
    },
    {
      "epoch": 1.6739915713425648,
      "grad_norm": 4.963397026062012,
      "learning_rate": 2.4542614904060687e-05,
      "loss": 0.0165,
      "step": 1390
    },
    {
      "epoch": 1.6860325105358218,
      "grad_norm": 11.424381256103516,
      "learning_rate": 2.4319500223114682e-05,
      "loss": 0.0244,
      "step": 1400
    },
    {
      "epoch": 1.6980734497290788,
      "grad_norm": 0.2027626931667328,
      "learning_rate": 2.4096385542168677e-05,
      "loss": 0.0101,
      "step": 1410
    },
    {
      "epoch": 1.710114388922336,
      "grad_norm": 11.940728187561035,
      "learning_rate": 2.387327086122267e-05,
      "loss": 0.0201,
      "step": 1420
    },
    {
      "epoch": 1.722155328115593,
      "grad_norm": 12.448373794555664,
      "learning_rate": 2.3650156180276664e-05,
      "loss": 0.0425,
      "step": 1430
    },
    {
      "epoch": 1.73419626730885,
      "grad_norm": 15.968953132629395,
      "learning_rate": 2.3427041499330656e-05,
      "loss": 0.0294,
      "step": 1440
    },
    {
      "epoch": 1.7462372065021072,
      "grad_norm": 0.5868140459060669,
      "learning_rate": 2.320392681838465e-05,
      "loss": 0.0328,
      "step": 1450
    },
    {
      "epoch": 1.7582781456953642,
      "grad_norm": 18.026885986328125,
      "learning_rate": 2.2980812137438646e-05,
      "loss": 0.0363,
      "step": 1460
    },
    {
      "epoch": 1.7703190848886212,
      "grad_norm": 26.996437072753906,
      "learning_rate": 2.2757697456492638e-05,
      "loss": 0.032,
      "step": 1470
    },
    {
      "epoch": 1.7823600240818784,
      "grad_norm": 25.193265914916992,
      "learning_rate": 2.253458277554663e-05,
      "loss": 0.0191,
      "step": 1480
    },
    {
      "epoch": 1.7944009632751354,
      "grad_norm": 1.650780439376831,
      "learning_rate": 2.2311468094600628e-05,
      "loss": 0.0147,
      "step": 1490
    },
    {
      "epoch": 1.8064419024683924,
      "grad_norm": 0.1049940213561058,
      "learning_rate": 2.208835341365462e-05,
      "loss": 0.0213,
      "step": 1500
    },
    {
      "epoch": 1.8184828416616496,
      "grad_norm": 1.9366893768310547,
      "learning_rate": 2.186523873270861e-05,
      "loss": 0.0176,
      "step": 1510
    },
    {
      "epoch": 1.8305237808549066,
      "grad_norm": 0.09167797118425369,
      "learning_rate": 2.1642124051762607e-05,
      "loss": 0.0227,
      "step": 1520
    },
    {
      "epoch": 1.8425647200481636,
      "grad_norm": 20.63637351989746,
      "learning_rate": 2.1419009370816602e-05,
      "loss": 0.0199,
      "step": 1530
    },
    {
      "epoch": 1.8546056592414208,
      "grad_norm": 7.514628887176514,
      "learning_rate": 2.1195894689870593e-05,
      "loss": 0.0159,
      "step": 1540
    },
    {
      "epoch": 1.866646598434678,
      "grad_norm": 27.5736141204834,
      "learning_rate": 2.097278000892459e-05,
      "loss": 0.0189,
      "step": 1550
    },
    {
      "epoch": 1.8786875376279348,
      "grad_norm": 9.517942428588867,
      "learning_rate": 2.074966532797858e-05,
      "loss": 0.0036,
      "step": 1560
    },
    {
      "epoch": 1.890728476821192,
      "grad_norm": 1.2054420709609985,
      "learning_rate": 2.0526550647032576e-05,
      "loss": 0.0211,
      "step": 1570
    },
    {
      "epoch": 1.9027694160144493,
      "grad_norm": 3.337954521179199,
      "learning_rate": 2.030343596608657e-05,
      "loss": 0.0207,
      "step": 1580
    },
    {
      "epoch": 1.914810355207706,
      "grad_norm": 10.905184745788574,
      "learning_rate": 2.0080321285140562e-05,
      "loss": 0.0123,
      "step": 1590
    },
    {
      "epoch": 1.9268512944009633,
      "grad_norm": 7.964990615844727,
      "learning_rate": 1.9857206604194558e-05,
      "loss": 0.0204,
      "step": 1600
    },
    {
      "epoch": 1.9388922335942205,
      "grad_norm": 11.229500770568848,
      "learning_rate": 1.9634091923248553e-05,
      "loss": 0.0258,
      "step": 1610
    },
    {
      "epoch": 1.9509331727874775,
      "grad_norm": 21.939224243164062,
      "learning_rate": 1.9410977242302544e-05,
      "loss": 0.0121,
      "step": 1620
    },
    {
      "epoch": 1.9629741119807345,
      "grad_norm": 3.842597723007202,
      "learning_rate": 1.9187862561356536e-05,
      "loss": 0.0157,
      "step": 1630
    },
    {
      "epoch": 1.9750150511739917,
      "grad_norm": 17.12714958190918,
      "learning_rate": 1.896474788041053e-05,
      "loss": 0.0159,
      "step": 1640
    },
    {
      "epoch": 1.9870559903672487,
      "grad_norm": 23.860105514526367,
      "learning_rate": 1.8741633199464527e-05,
      "loss": 0.0069,
      "step": 1650
    },
    {
      "epoch": 1.9990969295605057,
      "grad_norm": 31.328365325927734,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.0088,
      "step": 1660
    },
    {
      "epoch": 1.9990969295605057,
      "eval_f1": 0.991992163050005,
      "eval_loss": 0.014776448719203472,
      "eval_runtime": 123.3802,
      "eval_samples_per_second": 95.72,
      "eval_steps_per_second": 2.999,
      "step": 1660
    },
    {
      "epoch": 2.011438892233594,
      "grad_norm": 20.75777244567871,
      "learning_rate": 1.8295403837572513e-05,
      "loss": 0.0128,
      "step": 1670
    },
    {
      "epoch": 2.0234798314268514,
      "grad_norm": 0.30569037795066833,
      "learning_rate": 1.8072289156626505e-05,
      "loss": 0.0044,
      "step": 1680
    },
    {
      "epoch": 2.035520770620108,
      "grad_norm": 0.06706225126981735,
      "learning_rate": 1.78491744756805e-05,
      "loss": 0.0133,
      "step": 1690
    },
    {
      "epoch": 2.0475617098133654,
      "grad_norm": 38.09376907348633,
      "learning_rate": 1.7626059794734495e-05,
      "loss": 0.0068,
      "step": 1700
    },
    {
      "epoch": 2.0596026490066226,
      "grad_norm": 26.19437599182129,
      "learning_rate": 1.7402945113788487e-05,
      "loss": 0.0258,
      "step": 1710
    },
    {
      "epoch": 2.07164358819988,
      "grad_norm": 0.3012349307537079,
      "learning_rate": 1.7179830432842482e-05,
      "loss": 0.0255,
      "step": 1720
    },
    {
      "epoch": 2.0836845273931366,
      "grad_norm": 0.3738686144351959,
      "learning_rate": 1.6956715751896478e-05,
      "loss": 0.0299,
      "step": 1730
    },
    {
      "epoch": 2.095725466586394,
      "grad_norm": 15.113606452941895,
      "learning_rate": 1.673360107095047e-05,
      "loss": 0.0159,
      "step": 1740
    },
    {
      "epoch": 2.107766405779651,
      "grad_norm": 0.10998114198446274,
      "learning_rate": 1.651048639000446e-05,
      "loss": 0.0123,
      "step": 1750
    },
    {
      "epoch": 2.119807344972908,
      "grad_norm": 0.23346582055091858,
      "learning_rate": 1.6287371709058456e-05,
      "loss": 0.0015,
      "step": 1760
    },
    {
      "epoch": 2.131848284166165,
      "grad_norm": 11.610380172729492,
      "learning_rate": 1.606425702811245e-05,
      "loss": 0.0279,
      "step": 1770
    },
    {
      "epoch": 2.1438892233594222,
      "grad_norm": 40.500938415527344,
      "learning_rate": 1.5841142347166443e-05,
      "loss": 0.0024,
      "step": 1780
    },
    {
      "epoch": 2.155930162552679,
      "grad_norm": 10.448357582092285,
      "learning_rate": 1.5618027666220438e-05,
      "loss": 0.0319,
      "step": 1790
    },
    {
      "epoch": 2.167971101745936,
      "grad_norm": 33.20566940307617,
      "learning_rate": 1.5394912985274433e-05,
      "loss": 0.0138,
      "step": 1800
    },
    {
      "epoch": 2.1800120409391934,
      "grad_norm": 1.2241170406341553,
      "learning_rate": 1.5171798304328425e-05,
      "loss": 0.0021,
      "step": 1810
    },
    {
      "epoch": 2.19205298013245,
      "grad_norm": 3.8104119300842285,
      "learning_rate": 1.494868362338242e-05,
      "loss": 0.005,
      "step": 1820
    },
    {
      "epoch": 2.2040939193257074,
      "grad_norm": 0.266283243894577,
      "learning_rate": 1.4725568942436414e-05,
      "loss": 0.0073,
      "step": 1830
    },
    {
      "epoch": 2.2161348585189646,
      "grad_norm": 17.923351287841797,
      "learning_rate": 1.4502454261490405e-05,
      "loss": 0.0164,
      "step": 1840
    },
    {
      "epoch": 2.2281757977122214,
      "grad_norm": 34.860801696777344,
      "learning_rate": 1.42793395805444e-05,
      "loss": 0.0217,
      "step": 1850
    },
    {
      "epoch": 2.2402167369054786,
      "grad_norm": 13.830132484436035,
      "learning_rate": 1.4056224899598394e-05,
      "loss": 0.0116,
      "step": 1860
    },
    {
      "epoch": 2.252257676098736,
      "grad_norm": 30.40813446044922,
      "learning_rate": 1.3833110218652387e-05,
      "loss": 0.0145,
      "step": 1870
    },
    {
      "epoch": 2.2642986152919926,
      "grad_norm": 5.166720867156982,
      "learning_rate": 1.3609995537706383e-05,
      "loss": 0.0114,
      "step": 1880
    },
    {
      "epoch": 2.27633955448525,
      "grad_norm": 0.14178332686424255,
      "learning_rate": 1.3386880856760376e-05,
      "loss": 0.006,
      "step": 1890
    },
    {
      "epoch": 2.288380493678507,
      "grad_norm": 14.360862731933594,
      "learning_rate": 1.3163766175814368e-05,
      "loss": 0.0196,
      "step": 1900
    },
    {
      "epoch": 2.300421432871764,
      "grad_norm": 36.61174392700195,
      "learning_rate": 1.2940651494868363e-05,
      "loss": 0.0112,
      "step": 1910
    },
    {
      "epoch": 2.312462372065021,
      "grad_norm": 0.05367890000343323,
      "learning_rate": 1.2717536813922356e-05,
      "loss": 0.0044,
      "step": 1920
    },
    {
      "epoch": 2.3245033112582782,
      "grad_norm": 5.995903968811035,
      "learning_rate": 1.2494422132976352e-05,
      "loss": 0.0068,
      "step": 1930
    },
    {
      "epoch": 2.336544250451535,
      "grad_norm": 0.1826033890247345,
      "learning_rate": 1.2271307452030343e-05,
      "loss": 0.0037,
      "step": 1940
    },
    {
      "epoch": 2.3485851896447922,
      "grad_norm": 0.022175664082169533,
      "learning_rate": 1.2048192771084338e-05,
      "loss": 0.0039,
      "step": 1950
    },
    {
      "epoch": 2.3606261288380495,
      "grad_norm": 0.009515208192169666,
      "learning_rate": 1.1825078090138332e-05,
      "loss": 0.0174,
      "step": 1960
    },
    {
      "epoch": 2.3726670680313067,
      "grad_norm": 1.4253636598587036,
      "learning_rate": 1.1601963409192325e-05,
      "loss": 0.0172,
      "step": 1970
    },
    {
      "epoch": 2.3847080072245634,
      "grad_norm": 0.5044592022895813,
      "learning_rate": 1.1378848728246319e-05,
      "loss": 0.0136,
      "step": 1980
    },
    {
      "epoch": 2.3967489464178207,
      "grad_norm": 0.10618968307971954,
      "learning_rate": 1.1155734047300314e-05,
      "loss": 0.0134,
      "step": 1990
    },
    {
      "epoch": 2.4087898856110774,
      "grad_norm": 0.37878698110580444,
      "learning_rate": 1.0932619366354306e-05,
      "loss": 0.0077,
      "step": 2000
    },
    {
      "epoch": 2.4208308248043346,
      "grad_norm": 0.15668153762817383,
      "learning_rate": 1.0709504685408301e-05,
      "loss": 0.0045,
      "step": 2010
    },
    {
      "epoch": 2.432871763997592,
      "grad_norm": 43.199405670166016,
      "learning_rate": 1.0486390004462294e-05,
      "loss": 0.0107,
      "step": 2020
    },
    {
      "epoch": 2.444912703190849,
      "grad_norm": 25.30035972595215,
      "learning_rate": 1.0263275323516288e-05,
      "loss": 0.0067,
      "step": 2030
    },
    {
      "epoch": 2.456953642384106,
      "grad_norm": 0.006626456510275602,
      "learning_rate": 1.0040160642570281e-05,
      "loss": 0.0122,
      "step": 2040
    },
    {
      "epoch": 2.468994581577363,
      "grad_norm": 6.633700847625732,
      "learning_rate": 9.817045961624276e-06,
      "loss": 0.0094,
      "step": 2050
    },
    {
      "epoch": 2.4810355207706203,
      "grad_norm": 0.006737573072314262,
      "learning_rate": 9.593931280678268e-06,
      "loss": 0.0069,
      "step": 2060
    },
    {
      "epoch": 2.493076459963877,
      "grad_norm": 0.12591099739074707,
      "learning_rate": 9.370816599732263e-06,
      "loss": 0.0134,
      "step": 2070
    },
    {
      "epoch": 2.5051173991571343,
      "grad_norm": 0.9642283916473389,
      "learning_rate": 9.147701918786257e-06,
      "loss": 0.0047,
      "step": 2080
    },
    {
      "epoch": 2.5171583383503915,
      "grad_norm": 17.466915130615234,
      "learning_rate": 8.92458723784025e-06,
      "loss": 0.0136,
      "step": 2090
    },
    {
      "epoch": 2.5291992775436483,
      "grad_norm": 0.034244559705257416,
      "learning_rate": 8.701472556894244e-06,
      "loss": 0.0111,
      "step": 2100
    },
    {
      "epoch": 2.5412402167369055,
      "grad_norm": 0.06431080400943756,
      "learning_rate": 8.478357875948239e-06,
      "loss": 0.0027,
      "step": 2110
    },
    {
      "epoch": 2.5532811559301627,
      "grad_norm": 0.09189895540475845,
      "learning_rate": 8.25524319500223e-06,
      "loss": 0.0153,
      "step": 2120
    },
    {
      "epoch": 2.5653220951234195,
      "grad_norm": 0.2820568084716797,
      "learning_rate": 8.032128514056226e-06,
      "loss": 0.0082,
      "step": 2130
    },
    {
      "epoch": 2.5773630343166767,
      "grad_norm": 0.28683242201805115,
      "learning_rate": 7.809013833110219e-06,
      "loss": 0.0069,
      "step": 2140
    },
    {
      "epoch": 2.589403973509934,
      "grad_norm": 0.005995332263410091,
      "learning_rate": 7.5858991521642126e-06,
      "loss": 0.003,
      "step": 2150
    },
    {
      "epoch": 2.601444912703191,
      "grad_norm": 0.008528484962880611,
      "learning_rate": 7.362784471218207e-06,
      "loss": 0.009,
      "step": 2160
    },
    {
      "epoch": 2.613485851896448,
      "grad_norm": 0.0026729849632829428,
      "learning_rate": 7.1396697902722e-06,
      "loss": 0.002,
      "step": 2170
    },
    {
      "epoch": 2.625526791089705,
      "grad_norm": 0.052899375557899475,
      "learning_rate": 6.916555109326194e-06,
      "loss": 0.0129,
      "step": 2180
    },
    {
      "epoch": 2.637567730282962,
      "grad_norm": 4.501436710357666,
      "learning_rate": 6.693440428380188e-06,
      "loss": 0.0124,
      "step": 2190
    },
    {
      "epoch": 2.649608669476219,
      "grad_norm": 7.804245948791504,
      "learning_rate": 6.4703257474341815e-06,
      "loss": 0.0037,
      "step": 2200
    },
    {
      "epoch": 2.6616496086694763,
      "grad_norm": 0.03340630233287811,
      "learning_rate": 6.247211066488176e-06,
      "loss": 0.0064,
      "step": 2210
    },
    {
      "epoch": 2.6736905478627335,
      "grad_norm": 0.016790101304650307,
      "learning_rate": 6.024096385542169e-06,
      "loss": 0.0124,
      "step": 2220
    },
    {
      "epoch": 2.6857314870559903,
      "grad_norm": 0.05290292948484421,
      "learning_rate": 5.800981704596163e-06,
      "loss": 0.0023,
      "step": 2230
    },
    {
      "epoch": 2.6977724262492475,
      "grad_norm": 7.013312816619873,
      "learning_rate": 5.577867023650157e-06,
      "loss": 0.0093,
      "step": 2240
    },
    {
      "epoch": 2.7098133654425043,
      "grad_norm": 5.808313846588135,
      "learning_rate": 5.3547523427041504e-06,
      "loss": 0.0025,
      "step": 2250
    },
    {
      "epoch": 2.7218543046357615,
      "grad_norm": 0.07213304936885834,
      "learning_rate": 5.131637661758144e-06,
      "loss": 0.0036,
      "step": 2260
    },
    {
      "epoch": 2.7338952438290187,
      "grad_norm": 0.8108634352684021,
      "learning_rate": 4.908522980812138e-06,
      "loss": 0.0019,
      "step": 2270
    },
    {
      "epoch": 2.745936183022276,
      "grad_norm": 0.007346575614064932,
      "learning_rate": 4.685408299866132e-06,
      "loss": 0.0039,
      "step": 2280
    },
    {
      "epoch": 2.7579771222155327,
      "grad_norm": 12.846696853637695,
      "learning_rate": 4.462293618920125e-06,
      "loss": 0.0203,
      "step": 2290
    },
    {
      "epoch": 2.77001806140879,
      "grad_norm": 1.8753130435943604,
      "learning_rate": 4.239178937974119e-06,
      "loss": 0.0004,
      "step": 2300
    },
    {
      "epoch": 2.7820590006020467,
      "grad_norm": 0.018001612275838852,
      "learning_rate": 4.016064257028113e-06,
      "loss": 0.0054,
      "step": 2310
    },
    {
      "epoch": 2.794099939795304,
      "grad_norm": 1.2618842124938965,
      "learning_rate": 3.7929495760821063e-06,
      "loss": 0.0005,
      "step": 2320
    },
    {
      "epoch": 2.806140878988561,
      "grad_norm": 0.04195760190486908,
      "learning_rate": 3.5698348951361e-06,
      "loss": 0.0164,
      "step": 2330
    },
    {
      "epoch": 2.8181818181818183,
      "grad_norm": 0.20935901999473572,
      "learning_rate": 3.346720214190094e-06,
      "loss": 0.0121,
      "step": 2340
    },
    {
      "epoch": 2.830222757375075,
      "grad_norm": 0.14867332577705383,
      "learning_rate": 3.123605533244088e-06,
      "loss": 0.0002,
      "step": 2350
    },
    {
      "epoch": 2.8422636965683323,
      "grad_norm": 12.636212348937988,
      "learning_rate": 2.9004908522980813e-06,
      "loss": 0.0059,
      "step": 2360
    },
    {
      "epoch": 2.8543046357615895,
      "grad_norm": 0.08282257616519928,
      "learning_rate": 2.6773761713520752e-06,
      "loss": 0.0072,
      "step": 2370
    },
    {
      "epoch": 2.8663455749548463,
      "grad_norm": 37.93838882446289,
      "learning_rate": 2.454261490406069e-06,
      "loss": 0.0139,
      "step": 2380
    },
    {
      "epoch": 2.8783865141481035,
      "grad_norm": 0.531898558139801,
      "learning_rate": 2.2311468094600625e-06,
      "loss": 0.0065,
      "step": 2390
    },
    {
      "epoch": 2.8904274533413608,
      "grad_norm": 17.711135864257812,
      "learning_rate": 2.0080321285140564e-06,
      "loss": 0.007,
      "step": 2400
    },
    {
      "epoch": 2.902468392534618,
      "grad_norm": 0.04107944294810295,
      "learning_rate": 1.78491744756805e-06,
      "loss": 0.0041,
      "step": 2410
    },
    {
      "epoch": 2.9145093317278747,
      "grad_norm": 0.00876621063798666,
      "learning_rate": 1.561802766622044e-06,
      "loss": 0.0098,
      "step": 2420
    },
    {
      "epoch": 2.926550270921132,
      "grad_norm": 0.09590450674295425,
      "learning_rate": 1.3386880856760376e-06,
      "loss": 0.0004,
      "step": 2430
    },
    {
      "epoch": 2.9385912101143887,
      "grad_norm": 0.335151344537735,
      "learning_rate": 1.1155734047300313e-06,
      "loss": 0.002,
      "step": 2440
    },
    {
      "epoch": 2.950632149307646,
      "grad_norm": 17.89304542541504,
      "learning_rate": 8.92458723784025e-07,
      "loss": 0.0038,
      "step": 2450
    },
    {
      "epoch": 2.962673088500903,
      "grad_norm": 7.468949794769287,
      "learning_rate": 6.693440428380188e-07,
      "loss": 0.0054,
      "step": 2460
    },
    {
      "epoch": 2.9747140276941604,
      "grad_norm": 1.7089877128601074,
      "learning_rate": 4.462293618920125e-07,
      "loss": 0.0044,
      "step": 2470
    },
    {
      "epoch": 2.986754966887417,
      "grad_norm": 0.03323278948664665,
      "learning_rate": 2.2311468094600626e-07,
      "loss": 0.0097,
      "step": 2480
    },
    {
      "epoch": 2.9987959060806744,
      "grad_norm": 0.1638062298297882,
      "learning_rate": 0.0,
      "loss": 0.0042,
      "step": 2490
    },
    {
      "epoch": 2.9987959060806744,
      "eval_f1": 0.9951696391786315,
      "eval_loss": 0.009932301938533783,
      "eval_runtime": 123.5306,
      "eval_samples_per_second": 95.604,
      "eval_steps_per_second": 2.995,
      "step": 2490
    },
    {
      "epoch": 2.9987959060806744,
      "step": 2490,
      "total_flos": 2.4970885598061527e+19,
      "train_loss": 0.05150821726141782,
      "train_runtime": 5585.3207,
      "train_samples_per_second": 57.088,
      "train_steps_per_second": 0.446
    }
  ],
  "logging_steps": 10,
  "max_steps": 2490,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.4970885598061527e+19,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}