{
  "best_global_step": 1378,
  "best_metric": 0.34233906865119934,
  "best_model_checkpoint": "siglip2-finetune-full/checkpoint-1378",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1378,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.36284470246734396,
      "grad_norm": 9.84671688079834,
      "learning_rate": 0.00016674057649667407,
      "loss": 0.9043,
      "step": 500
    },
    {
      "epoch": 0.7256894049346879,
      "grad_norm": 11.858268737792969,
      "learning_rate": 0.00012978566149297858,
      "loss": 0.5152,
      "step": 1000
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9016836086404066,
      "eval_loss": 0.34233906865119934,
      "eval_model_preparation_time": 0.0023,
      "eval_runtime": 552.3647,
      "eval_samples_per_second": 79.788,
      "eval_steps_per_second": 9.973,
      "step": 1378
    }
  ],
  "logging_steps": 500,
  "max_steps": 2756,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.692588424596177e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}