{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.927536231884058,
"eval_steps": 500,
"global_step": 102,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.028985507246376812,
"grad_norm": 11.274454167048727,
"learning_rate": 0.0,
"loss": 0.6357,
"step": 1
},
{
"epoch": 0.057971014492753624,
"grad_norm": 10.143348640958312,
"learning_rate": 9.090909090909091e-07,
"loss": 0.6666,
"step": 2
},
{
"epoch": 0.08695652173913043,
"grad_norm": 9.086409087474683,
"learning_rate": 1.8181818181818183e-06,
"loss": 0.6451,
"step": 3
},
{
"epoch": 0.11594202898550725,
"grad_norm": 8.70666602748876,
"learning_rate": 2.7272727272727272e-06,
"loss": 0.6048,
"step": 4
},
{
"epoch": 0.14492753623188406,
"grad_norm": 8.976482480167713,
"learning_rate": 3.6363636363636366e-06,
"loss": 0.5937,
"step": 5
},
{
"epoch": 0.17391304347826086,
"grad_norm": 6.543196052759934,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.4943,
"step": 6
},
{
"epoch": 0.2028985507246377,
"grad_norm": 7.511256155710179,
"learning_rate": 5.4545454545454545e-06,
"loss": 0.4247,
"step": 7
},
{
"epoch": 0.2318840579710145,
"grad_norm": 3.274069972289533,
"learning_rate": 6.363636363636364e-06,
"loss": 0.302,
"step": 8
},
{
"epoch": 0.2608695652173913,
"grad_norm": 5.507527578193957,
"learning_rate": 7.272727272727273e-06,
"loss": 0.2751,
"step": 9
},
{
"epoch": 0.2898550724637681,
"grad_norm": 3.651481150079746,
"learning_rate": 8.181818181818183e-06,
"loss": 0.2311,
"step": 10
},
{
"epoch": 0.3188405797101449,
"grad_norm": 2.9570009811876314,
"learning_rate": 9.090909090909091e-06,
"loss": 0.2133,
"step": 11
},
{
"epoch": 0.34782608695652173,
"grad_norm": 2.5680951348234355,
"learning_rate": 1e-05,
"loss": 0.2021,
"step": 12
},
{
"epoch": 0.37681159420289856,
"grad_norm": 2.4726534615132394,
"learning_rate": 9.997020702755353e-06,
"loss": 0.2086,
"step": 13
},
{
"epoch": 0.4057971014492754,
"grad_norm": 2.0168121750509207,
"learning_rate": 9.98808636150624e-06,
"loss": 0.1844,
"step": 14
},
{
"epoch": 0.43478260869565216,
"grad_norm": 1.988964439347895,
"learning_rate": 9.973207623475964e-06,
"loss": 0.1561,
"step": 15
},
{
"epoch": 0.463768115942029,
"grad_norm": 1.703404936865681,
"learning_rate": 9.952402219937817e-06,
"loss": 0.1561,
"step": 16
},
{
"epoch": 0.4927536231884058,
"grad_norm": 1.4224019007386586,
"learning_rate": 9.925694945084369e-06,
"loss": 0.1359,
"step": 17
},
{
"epoch": 0.5217391304347826,
"grad_norm": 1.390079768168245,
"learning_rate": 9.893117626479778e-06,
"loss": 0.1422,
"step": 18
},
{
"epoch": 0.5507246376811594,
"grad_norm": 1.363268530198362,
"learning_rate": 9.854709087130261e-06,
"loss": 0.1448,
"step": 19
},
{
"epoch": 0.5797101449275363,
"grad_norm": 1.2167507253689909,
"learning_rate": 9.810515099218004e-06,
"loss": 0.1276,
"step": 20
},
{
"epoch": 0.6086956521739131,
"grad_norm": 1.1574568817316992,
"learning_rate": 9.76058832955357e-06,
"loss": 0.1177,
"step": 21
},
{
"epoch": 0.6376811594202898,
"grad_norm": 1.2101868179533353,
"learning_rate": 9.704988276811883e-06,
"loss": 0.1086,
"step": 22
},
{
"epoch": 0.6666666666666666,
"grad_norm": 1.1144422829365617,
"learning_rate": 9.643781200626512e-06,
"loss": 0.1291,
"step": 23
},
{
"epoch": 0.6956521739130435,
"grad_norm": 1.2450794816372637,
"learning_rate": 9.577040042626832e-06,
"loss": 0.1183,
"step": 24
},
{
"epoch": 0.7246376811594203,
"grad_norm": 1.2167862630409976,
"learning_rate": 9.504844339512096e-06,
"loss": 0.1213,
"step": 25
},
{
"epoch": 0.7536231884057971,
"grad_norm": 1.248557822116599,
"learning_rate": 9.427280128266049e-06,
"loss": 0.1177,
"step": 26
},
{
"epoch": 0.782608695652174,
"grad_norm": 1.113733087204506,
"learning_rate": 9.344439843625034e-06,
"loss": 0.1152,
"step": 27
},
{
"epoch": 0.8115942028985508,
"grad_norm": 1.1038118432089006,
"learning_rate": 9.256422207921757e-06,
"loss": 0.1246,
"step": 28
},
{
"epoch": 0.8405797101449275,
"grad_norm": 1.100623510584588,
"learning_rate": 9.163332113436031e-06,
"loss": 0.1184,
"step": 29
},
{
"epoch": 0.8695652173913043,
"grad_norm": 1.0179727735610475,
"learning_rate": 9.065280497392663e-06,
"loss": 0.1096,
"step": 30
},
{
"epoch": 0.8985507246376812,
"grad_norm": 1.2701457238654967,
"learning_rate": 8.962384209755453e-06,
"loss": 0.1195,
"step": 31
},
{
"epoch": 0.927536231884058,
"grad_norm": 1.1294896292998675,
"learning_rate": 8.854765873974898e-06,
"loss": 0.1105,
"step": 32
},
{
"epoch": 0.9565217391304348,
"grad_norm": 1.1171814840270677,
"learning_rate": 8.742553740855507e-06,
"loss": 0.1175,
"step": 33
},
{
"epoch": 0.9855072463768116,
"grad_norm": 0.9886273247393406,
"learning_rate": 8.625881535716883e-06,
"loss": 0.1058,
"step": 34
},
{
"epoch": 1.0,
"grad_norm": 0.9886273247393406,
"learning_rate": 8.504888299030748e-06,
"loss": 0.1001,
"step": 35
},
{
"epoch": 1.0289855072463767,
"grad_norm": 1.5562818130049305,
"learning_rate": 8.379718220723772e-06,
"loss": 0.0727,
"step": 36
},
{
"epoch": 1.0579710144927537,
"grad_norm": 0.8147596643901716,
"learning_rate": 8.250520468343722e-06,
"loss": 0.0871,
"step": 37
},
{
"epoch": 1.0869565217391304,
"grad_norm": 0.785533808227011,
"learning_rate": 8.117449009293668e-06,
"loss": 0.0752,
"step": 38
},
{
"epoch": 1.1159420289855073,
"grad_norm": 1.067197574454516,
"learning_rate": 7.980662427346127e-06,
"loss": 0.0862,
"step": 39
},
{
"epoch": 1.144927536231884,
"grad_norm": 1.0067934230612672,
"learning_rate": 7.84032373365578e-06,
"loss": 0.0928,
"step": 40
},
{
"epoch": 1.1739130434782608,
"grad_norm": 1.027473082943366,
"learning_rate": 7.696600172495997e-06,
"loss": 0.0757,
"step": 41
},
{
"epoch": 1.2028985507246377,
"grad_norm": 0.9703955436663084,
"learning_rate": 7.5496630219506805e-06,
"loss": 0.0621,
"step": 42
},
{
"epoch": 1.2318840579710144,
"grad_norm": 0.9885265576714246,
"learning_rate": 7.399687389798933e-06,
"loss": 0.0855,
"step": 43
},
{
"epoch": 1.2608695652173914,
"grad_norm": 0.9758981949816004,
"learning_rate": 7.246852004835807e-06,
"loss": 0.084,
"step": 44
},
{
"epoch": 1.289855072463768,
"grad_norm": 0.9024594434416954,
"learning_rate": 7.091339003877826e-06,
"loss": 0.0767,
"step": 45
},
{
"epoch": 1.318840579710145,
"grad_norm": 0.830224350365886,
"learning_rate": 6.933333714707094e-06,
"loss": 0.0806,
"step": 46
},
{
"epoch": 1.3478260869565217,
"grad_norm": 0.8475268092069895,
"learning_rate": 6.773024435212678e-06,
"loss": 0.0686,
"step": 47
},
{
"epoch": 1.3768115942028984,
"grad_norm": 0.8152669559501473,
"learning_rate": 6.6106022089924535e-06,
"loss": 0.0913,
"step": 48
},
{
"epoch": 1.4057971014492754,
"grad_norm": 0.8455140548788239,
"learning_rate": 6.4462605976828395e-06,
"loss": 0.0746,
"step": 49
},
{
"epoch": 1.434782608695652,
"grad_norm": 0.817492348372533,
"learning_rate": 6.280195450287736e-06,
"loss": 0.0976,
"step": 50
},
{
"epoch": 1.463768115942029,
"grad_norm": 0.880952546066598,
"learning_rate": 6.112604669781572e-06,
"loss": 0.0833,
"step": 51
},
{
"epoch": 1.4927536231884058,
"grad_norm": 0.8489601481147181,
"learning_rate": 5.943687977264584e-06,
"loss": 0.0784,
"step": 52
},
{
"epoch": 1.5217391304347827,
"grad_norm": 0.8142122839258903,
"learning_rate": 5.773646673951406e-06,
"loss": 0.0838,
"step": 53
},
{
"epoch": 1.5507246376811594,
"grad_norm": 0.8922929202950094,
"learning_rate": 5.6026834012766155e-06,
"loss": 0.0828,
"step": 54
},
{
"epoch": 1.5797101449275361,
"grad_norm": 0.7122624377371234,
"learning_rate": 5.4310018994030974e-06,
"loss": 0.0797,
"step": 55
},
{
"epoch": 1.608695652173913,
"grad_norm": 0.9217981197726534,
"learning_rate": 5.258806764421048e-06,
"loss": 0.0772,
"step": 56
},
{
"epoch": 1.6376811594202898,
"grad_norm": 0.9257391753751778,
"learning_rate": 5.0863032045269435e-06,
"loss": 0.0766,
"step": 57
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.8307688279849595,
"learning_rate": 4.913696795473058e-06,
"loss": 0.0818,
"step": 58
},
{
"epoch": 1.6956521739130435,
"grad_norm": 0.9173954819802914,
"learning_rate": 4.741193235578953e-06,
"loss": 0.0927,
"step": 59
},
{
"epoch": 1.7246376811594204,
"grad_norm": 0.8708594385417067,
"learning_rate": 4.568998100596903e-06,
"loss": 0.0665,
"step": 60
},
{
"epoch": 1.7536231884057971,
"grad_norm": 0.8126974465329198,
"learning_rate": 4.397316598723385e-06,
"loss": 0.0839,
"step": 61
},
{
"epoch": 1.7826086956521738,
"grad_norm": 0.853158086441921,
"learning_rate": 4.226353326048594e-06,
"loss": 0.0773,
"step": 62
},
{
"epoch": 1.8115942028985508,
"grad_norm": 0.8525307891771787,
"learning_rate": 4.056312022735417e-06,
"loss": 0.0809,
"step": 63
},
{
"epoch": 1.8405797101449275,
"grad_norm": 1.0467111292284437,
"learning_rate": 3.887395330218429e-06,
"loss": 0.0782,
"step": 64
},
{
"epoch": 1.8695652173913042,
"grad_norm": 0.9286296034389645,
"learning_rate": 3.7198045497122647e-06,
"loss": 0.0774,
"step": 65
},
{
"epoch": 1.8985507246376812,
"grad_norm": 0.7656482475139829,
"learning_rate": 3.553739402317162e-06,
"loss": 0.0767,
"step": 66
},
{
"epoch": 1.927536231884058,
"grad_norm": 0.7766347569440668,
"learning_rate": 3.389397791007548e-06,
"loss": 0.0984,
"step": 67
},
{
"epoch": 1.9565217391304348,
"grad_norm": 0.7336465690307817,
"learning_rate": 3.226975564787322e-06,
"loss": 0.074,
"step": 68
},
{
"epoch": 1.9855072463768115,
"grad_norm": 0.795203124951574,
"learning_rate": 3.0666662852929063e-06,
"loss": 0.0785,
"step": 69
},
{
"epoch": 2.0,
"grad_norm": 1.2688010574239046,
"learning_rate": 2.9086609961221758e-06,
"loss": 0.0659,
"step": 70
},
{
"epoch": 2.028985507246377,
"grad_norm": 0.5159106800993555,
"learning_rate": 2.7531479951641928e-06,
"loss": 0.0501,
"step": 71
},
{
"epoch": 2.0579710144927534,
"grad_norm": 0.49731065359536447,
"learning_rate": 2.6003126102010696e-06,
"loss": 0.0487,
"step": 72
},
{
"epoch": 2.0869565217391304,
"grad_norm": 0.4862325168980843,
"learning_rate": 2.450336978049322e-06,
"loss": 0.051,
"step": 73
},
{
"epoch": 2.1159420289855073,
"grad_norm": 0.4949710047982997,
"learning_rate": 2.3033998275040047e-06,
"loss": 0.0564,
"step": 74
},
{
"epoch": 2.1449275362318843,
"grad_norm": 0.5420095199622179,
"learning_rate": 2.159676266344222e-06,
"loss": 0.0497,
"step": 75
},
{
"epoch": 2.1739130434782608,
"grad_norm": 0.5223890245623899,
"learning_rate": 2.0193375726538737e-06,
"loss": 0.0608,
"step": 76
},
{
"epoch": 2.2028985507246377,
"grad_norm": 0.5072028301413968,
"learning_rate": 1.8825509907063328e-06,
"loss": 0.0466,
"step": 77
},
{
"epoch": 2.2318840579710146,
"grad_norm": 0.6058827129392587,
"learning_rate": 1.7494795316562791e-06,
"loss": 0.0519,
"step": 78
},
{
"epoch": 2.260869565217391,
"grad_norm": 0.5967337337925352,
"learning_rate": 1.6202817792762283e-06,
"loss": 0.0475,
"step": 79
},
{
"epoch": 2.289855072463768,
"grad_norm": 0.45709533088838783,
"learning_rate": 1.4951117009692528e-06,
"loss": 0.0439,
"step": 80
},
{
"epoch": 2.318840579710145,
"grad_norm": 0.5908455438244339,
"learning_rate": 1.374118464283119e-06,
"loss": 0.0447,
"step": 81
},
{
"epoch": 2.3478260869565215,
"grad_norm": 0.5958774937866624,
"learning_rate": 1.257446259144494e-06,
"loss": 0.0398,
"step": 82
},
{
"epoch": 2.3768115942028984,
"grad_norm": 0.5185817292507452,
"learning_rate": 1.145234126025102e-06,
"loss": 0.0522,
"step": 83
},
{
"epoch": 2.4057971014492754,
"grad_norm": 0.517132336779929,
"learning_rate": 1.037615790244549e-06,
"loss": 0.0542,
"step": 84
},
{
"epoch": 2.4347826086956523,
"grad_norm": 0.5753126450523465,
"learning_rate": 9.347195026073369e-07,
"loss": 0.053,
"step": 85
},
{
"epoch": 2.463768115942029,
"grad_norm": 0.5537929012260736,
"learning_rate": 8.366678865639688e-07,
"loss": 0.0533,
"step": 86
},
{
"epoch": 2.4927536231884058,
"grad_norm": 0.5648244812023223,
"learning_rate": 7.435777920782444e-07,
"loss": 0.0451,
"step": 87
},
{
"epoch": 2.5217391304347827,
"grad_norm": 0.6470131986288629,
"learning_rate": 6.555601563749675e-07,
"loss": 0.0638,
"step": 88
},
{
"epoch": 2.550724637681159,
"grad_norm": 0.682212123629125,
"learning_rate": 5.727198717339511e-07,
"loss": 0.0507,
"step": 89
},
{
"epoch": 2.579710144927536,
"grad_norm": 0.5044269543104078,
"learning_rate": 4.951556604879049e-07,
"loss": 0.0498,
"step": 90
},
{
"epoch": 2.608695652173913,
"grad_norm": 0.5906232388239259,
"learning_rate": 4.2295995737316854e-07,
"loss": 0.0487,
"step": 91
},
{
"epoch": 2.63768115942029,
"grad_norm": 0.6323799418493858,
"learning_rate": 3.5621879937348836e-07,
"loss": 0.0409,
"step": 92
},
{
"epoch": 2.6666666666666665,
"grad_norm": 0.5896524185850267,
"learning_rate": 2.9501172318811834e-07,
"loss": 0.0464,
"step": 93
},
{
"epoch": 2.6956521739130435,
"grad_norm": 0.5444149761155485,
"learning_rate": 2.394116704464294e-07,
"loss": 0.0559,
"step": 94
},
{
"epoch": 2.7246376811594204,
"grad_norm": 0.5667834464277357,
"learning_rate": 1.8948490078199767e-07,
"loss": 0.0419,
"step": 95
},
{
"epoch": 2.753623188405797,
"grad_norm": 0.607364619958279,
"learning_rate": 1.4529091286973994e-07,
"loss": 0.0467,
"step": 96
},
{
"epoch": 2.782608695652174,
"grad_norm": 0.5868613570667187,
"learning_rate": 1.0688237352022346e-07,
"loss": 0.05,
"step": 97
},
{
"epoch": 2.8115942028985508,
"grad_norm": 0.558060235895656,
"learning_rate": 7.430505491563101e-08,
"loss": 0.0373,
"step": 98
},
{
"epoch": 2.8405797101449277,
"grad_norm": 0.6239726216026107,
"learning_rate": 4.759778006218407e-08,
"loss": 0.0496,
"step": 99
},
{
"epoch": 2.869565217391304,
"grad_norm": 0.5091602751990856,
"learning_rate": 2.6792376524036878e-08,
"loss": 0.0511,
"step": 100
},
{
"epoch": 2.898550724637681,
"grad_norm": 0.5073066164113447,
"learning_rate": 1.1913638493762369e-08,
"loss": 0.0498,
"step": 101
},
{
"epoch": 2.927536231884058,
"grad_norm": 0.6581905704459963,
"learning_rate": 2.9792972446479605e-09,
"loss": 0.0503,
"step": 102
},
{
"epoch": 2.927536231884058,
"step": 102,
"total_flos": 10226403115008.0,
"train_loss": 0.12416269268621416,
"train_runtime": 5225.659,
"train_samples_per_second": 5.047,
"train_steps_per_second": 0.02
}
],
"logging_steps": 1,
"max_steps": 102,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 10226403115008.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}