{ "best_global_step": 300, "best_metric": 0.5585977435112, "best_model_checkpoint": "Llama-3.2-1B-it-Medical-LoRA/checkpoint-300", "epoch": 0.6795016987542469, "eval_steps": 100, "global_step": 300, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.11325028312570781, "grad_norm": 0.6456167697906494, "learning_rate": 0.00019075425790754258, "loss": 0.643, "step": 50 }, { "epoch": 0.22650056625141562, "grad_norm": 1.0792498588562012, "learning_rate": 0.0001664233576642336, "loss": 0.6465, "step": 100 }, { "epoch": 0.22650056625141562, "eval_loss": 0.5826964378356934, "eval_runtime": 394.3597, "eval_samples_per_second": 3.979, "eval_steps_per_second": 0.5, "step": 100 }, { "epoch": 0.33975084937712347, "grad_norm": 0.5628879070281982, "learning_rate": 0.0001420924574209246, "loss": 0.5815, "step": 150 }, { "epoch": 0.45300113250283125, "grad_norm": 0.5532649755477905, "learning_rate": 0.00011776155717761557, "loss": 0.5435, "step": 200 }, { "epoch": 0.45300113250283125, "eval_loss": 0.5700864791870117, "eval_runtime": 394.4221, "eval_samples_per_second": 3.978, "eval_steps_per_second": 0.499, "step": 200 }, { "epoch": 0.5662514156285391, "grad_norm": 0.7321934700012207, "learning_rate": 9.343065693430657e-05, "loss": 0.5816, "step": 250 }, { "epoch": 0.6795016987542469, "grad_norm": 0.43280959129333496, "learning_rate": 6.909975669099758e-05, "loss": 0.5611, "step": 300 }, { "epoch": 0.6795016987542469, "eval_loss": 0.5585977435112, "eval_runtime": 393.6528, "eval_samples_per_second": 3.986, "eval_steps_per_second": 0.5, "step": 300 } ], "logging_steps": 50, "max_steps": 441, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 100, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 3.376491779948544e+16, "train_batch_size": 8, "trial_name": null, "trial_params": null }