{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.5984,
  "eval_steps": 500,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.16,
      "grad_norm": 0.17174428701400757,
      "learning_rate": 0.0001,
      "loss": 1.229,
      "step": 50
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.1649257093667984,
      "learning_rate": 0.0002,
      "loss": 1.0561,
      "step": 100
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.17078982293605804,
      "learning_rate": 0.0001943566591422122,
      "loss": 1.0143,
      "step": 150
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.17896169424057007,
      "learning_rate": 0.0001887133182844244,
      "loss": 0.9925,
      "step": 200
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.17166747152805328,
      "learning_rate": 0.0001830699774266366,
      "loss": 0.9619,
      "step": 250
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.1940702199935913,
      "learning_rate": 0.00017742663656884877,
      "loss": 0.9658,
      "step": 300
    },
    {
      "epoch": 1.1184,
      "grad_norm": 0.1746826320886612,
      "learning_rate": 0.00017178329571106095,
      "loss": 0.9479,
      "step": 350
    },
    {
      "epoch": 1.2784,
      "grad_norm": 0.18184725940227509,
      "learning_rate": 0.00016613995485327313,
      "loss": 0.9126,
      "step": 400
    },
    {
      "epoch": 1.4384000000000001,
      "grad_norm": 0.18270964920520782,
      "learning_rate": 0.00016049661399548536,
      "loss": 0.9387,
      "step": 450
    },
    {
      "epoch": 1.5984,
      "grad_norm": 0.17156830430030823,
      "learning_rate": 0.00015485327313769753,
      "loss": 0.9285,
      "step": 500
    }
  ],
  "logging_steps": 50,
  "max_steps": 1872,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.5487075260730368e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}