{
"best_metric": 0.09891285747289658,
"best_model_checkpoint": "output/output_minicpmv26_upsampled_3686400_pixels/checkpoint-200",
"epoch": 0.704225352112676,
"eval_steps": 50,
"global_step": 200,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.176056338028169,
"eval_loss": 0.1100488156080246,
"eval_runtime": 190.5006,
"eval_samples_per_second": 5.953,
"eval_steps_per_second": 0.373,
"step": 50
},
{
"epoch": 0.352112676056338,
"eval_loss": 0.10178369283676147,
"eval_runtime": 190.517,
"eval_samples_per_second": 5.952,
"eval_steps_per_second": 0.373,
"step": 100
},
{
"epoch": 0.528169014084507,
"eval_loss": 0.09937668591737747,
"eval_runtime": 189.9102,
"eval_samples_per_second": 5.971,
"eval_steps_per_second": 0.374,
"step": 150
},
{
"epoch": 0.704225352112676,
"eval_loss": 0.09891285747289658,
"eval_runtime": 191.1583,
"eval_samples_per_second": 5.932,
"eval_steps_per_second": 0.371,
"step": 200
},
{
"epoch": 0.704225352112676,
"grad_norm": 1.8547923564910889,
"learning_rate": 1e-06,
"loss": 0.139,
"step": 200
},
{
"epoch": 0.704225352112676,
"step": 200,
"total_flos": 3.598989492318372e+17,
"train_loss": 0.1389956569671631,
"train_runtime": 3953.2203,
"train_samples_per_second": 2.024,
"train_steps_per_second": 0.126
}
],
"logging_steps": 1.0,
"max_steps": 500,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 50,
"total_flos": 3.598989492318372e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}