|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.20015515903801395, |
|
"eval_steps": 20, |
|
"global_step": 258, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
            "epoch": 0.0007757951900698216,
            "grad_norm": "Infinity",
            "learning_rate": 0.0,
            "loss": 6.6798,
            "step": 1
|
}, |
|
{ |
|
"epoch": 0.0015515903801396431, |
|
"grad_norm": 19.460369110107422, |
|
"learning_rate": 4.112808460634547e-08, |
|
"loss": 6.1418, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0023273855702094647, |
|
"grad_norm": 38.97050857543945, |
|
"learning_rate": 8.225616921269094e-08, |
|
"loss": 6.8984, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0031031807602792862, |
|
"grad_norm": 25.763484954833984, |
|
"learning_rate": 1.2338425381903642e-07, |
|
"loss": 7.1881, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.003878975950349108, |
|
"grad_norm": 11.17786979675293, |
|
"learning_rate": 1.6451233842538187e-07, |
|
"loss": 5.5114, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.004654771140418929, |
|
"grad_norm": 15.640860557556152, |
|
"learning_rate": 2.0564042303172737e-07, |
|
"loss": 6.2131, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.005430566330488751, |
|
"grad_norm": 26.026487350463867, |
|
"learning_rate": 2.4676850763807285e-07, |
|
"loss": 7.43, |
|
"step": 7 |
|
}, |
|
{ |
|
            "epoch": 0.0062063615205585725,
            "grad_norm": "Infinity",
            "learning_rate": 2.4676850763807285e-07,
            "loss": 9.8237,
            "step": 8
|
}, |
|
{ |
|
"epoch": 0.0069821567106283944, |
|
"grad_norm": 12.019311904907227, |
|
"learning_rate": 2.878965922444183e-07, |
|
"loss": 5.4554, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.007757951900698216, |
|
"grad_norm": 19.58979606628418, |
|
"learning_rate": 3.2902467685076374e-07, |
|
"loss": 6.6825, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.008533747090768037, |
|
"grad_norm": 11.048348426818848, |
|
"learning_rate": 3.701527614571093e-07, |
|
"loss": 5.6752, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.009309542280837859, |
|
"grad_norm": 10.552579879760742, |
|
"learning_rate": 4.1128084606345474e-07, |
|
"loss": 4.8126, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01008533747090768, |
|
"grad_norm": 10.90353775024414, |
|
"learning_rate": 4.524089306698002e-07, |
|
"loss": 4.7993, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.010861132660977503, |
|
"grad_norm": 14.389288902282715, |
|
"learning_rate": 4.935370152761457e-07, |
|
"loss": 6.2433, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.011636927851047323, |
|
"grad_norm": 32.469276428222656, |
|
"learning_rate": 5.346650998824911e-07, |
|
"loss": 7.8983, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.012412723041117145, |
|
"grad_norm": 17.988374710083008, |
|
"learning_rate": 5.757931844888366e-07, |
|
"loss": 5.9294, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.013188518231186967, |
|
"grad_norm": 53.49104309082031, |
|
"learning_rate": 6.169212690951821e-07, |
|
"loss": 10.0069, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.013964313421256789, |
|
"grad_norm": 51.57087707519531, |
|
"learning_rate": 6.580493537015275e-07, |
|
"loss": 9.761, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01474010861132661, |
|
"grad_norm": 10.312777519226074, |
|
"learning_rate": 6.99177438307873e-07, |
|
"loss": 4.7492, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"grad_norm": 24.298805236816406, |
|
"learning_rate": 7.403055229142186e-07, |
|
"loss": 7.0924, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_Qnli-dev_cosine_accuracy": 0.58984375, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9331285953521729, |
|
"eval_Qnli-dev_cosine_ap": 0.5485925209297758, |
|
"eval_Qnli-dev_cosine_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.6565657258033752, |
|
"eval_Qnli-dev_cosine_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.541015625, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 1048.2359619140625, |
|
"eval_Qnli-dev_dot_ap": 0.4748490344531905, |
|
"eval_Qnli-dev_dot_f1": 0.6300268096514745, |
|
"eval_Qnli-dev_dot_f1_threshold": 380.322998046875, |
|
"eval_Qnli-dev_dot_precision": 0.46078431372549017, |
|
"eval_Qnli-dev_dot_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.587890625, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 9.073010444641113, |
|
"eval_Qnli-dev_euclidean_ap": 0.5650640204703478, |
|
"eval_Qnli-dev_euclidean_f1": 0.6332882273342355, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 18.225353240966797, |
|
"eval_Qnli-dev_euclidean_precision": 0.46520874751491054, |
|
"eval_Qnli-dev_euclidean_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.6171875, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 155.41647338867188, |
|
"eval_Qnli-dev_manhattan_ap": 0.6018518855824669, |
|
"eval_Qnli-dev_manhattan_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 286.5093994140625, |
|
"eval_Qnli-dev_manhattan_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_manhattan_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_max_accuracy": 0.6171875, |
|
"eval_Qnli-dev_max_accuracy_threshold": 1048.2359619140625, |
|
"eval_Qnli-dev_max_ap": 0.6018518855824669, |
|
"eval_Qnli-dev_max_f1": 0.6332882273342355, |
|
"eval_Qnli-dev_max_f1_threshold": 380.322998046875, |
|
"eval_Qnli-dev_max_precision": 0.46520874751491054, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.666015625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9902692437171936, |
|
"eval_allNLI-dev_cosine_ap": 0.36630271296437167, |
|
"eval_allNLI-dev_cosine_f1": 0.5088235294117648, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7946319580078125, |
|
"eval_allNLI-dev_cosine_precision": 0.34122287968441817, |
|
"eval_allNLI-dev_cosine_recall": 1.0, |
|
"eval_allNLI-dev_dot_accuracy": 0.662109375, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 940.7398681640625, |
|
"eval_allNLI-dev_dot_ap": 0.33409368825504626, |
|
"eval_allNLI-dev_dot_f1": 0.5081240768094535, |
|
"eval_allNLI-dev_dot_f1_threshold": 585.4312744140625, |
|
"eval_allNLI-dev_dot_precision": 0.3412698412698413, |
|
"eval_allNLI-dev_dot_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.293120384216309, |
|
"eval_allNLI-dev_euclidean_ap": 0.36165210170894113, |
|
"eval_allNLI-dev_euclidean_f1": 0.5111441307578009, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 16.931983947753906, |
|
"eval_allNLI-dev_euclidean_precision": 0.344, |
|
"eval_allNLI-dev_euclidean_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.666015625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 74.05484008789062, |
|
"eval_allNLI-dev_manhattan_ap": 0.37228608061825896, |
|
"eval_allNLI-dev_manhattan_f1": 0.5081240768094535, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 240.91050720214844, |
|
"eval_allNLI-dev_manhattan_precision": 0.3412698412698413, |
|
"eval_allNLI-dev_manhattan_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_max_accuracy": 0.666015625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 940.7398681640625, |
|
"eval_allNLI-dev_max_ap": 0.37228608061825896, |
|
"eval_allNLI-dev_max_f1": 0.5111441307578009, |
|
"eval_allNLI-dev_max_f1_threshold": 585.4312744140625, |
|
"eval_allNLI-dev_max_precision": 0.344, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.6018518855824669, |
|
"eval_sts-test_pearson_cosine": 0.14468011325378688, |
|
"eval_sts-test_pearson_dot": 0.12373166012012136, |
|
"eval_sts-test_pearson_euclidean": 0.15040853567589774, |
|
"eval_sts-test_pearson_manhattan": 0.14680735741048356, |
|
"eval_sts-test_pearson_max": 0.15040853567589774, |
|
"eval_sts-test_spearman_cosine": 0.1980088477205014, |
|
"eval_sts-test_spearman_dot": 0.12132071438334546, |
|
"eval_sts-test_spearman_euclidean": 0.17802306863688658, |
|
"eval_sts-test_spearman_manhattan": 0.17413140944376768, |
|
"eval_sts-test_spearman_max": 0.1980088477205014, |
|
"eval_vitaminc-pairs_loss": 4.239284515380859, |
|
"eval_vitaminc-pairs_runtime": 5.4781, |
|
"eval_vitaminc-pairs_samples_per_second": 23.366, |
|
"eval_vitaminc-pairs_steps_per_second": 0.365, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_negation-triplets_loss": 5.499993801116943, |
|
"eval_negation-triplets_runtime": 1.0772, |
|
"eval_negation-triplets_samples_per_second": 118.828, |
|
"eval_negation-triplets_steps_per_second": 1.857, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_scitail-pairs-pos_loss": 2.806995391845703, |
|
"eval_scitail-pairs-pos_runtime": 1.221, |
|
"eval_scitail-pairs-pos_samples_per_second": 104.828, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.638, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_scitail-pairs-qa_loss": 4.214225769042969, |
|
"eval_scitail-pairs-qa_runtime": 0.7726, |
|
"eval_scitail-pairs-qa_samples_per_second": 165.67, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.589, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_xsum-pairs_loss": 5.982180118560791, |
|
"eval_xsum-pairs_runtime": 3.9493, |
|
"eval_xsum-pairs_samples_per_second": 32.411, |
|
"eval_xsum-pairs_steps_per_second": 0.506, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_sciq_pairs_loss": 0.733872652053833, |
|
"eval_sciq_pairs_runtime": 5.8101, |
|
"eval_sciq_pairs_samples_per_second": 22.031, |
|
"eval_sciq_pairs_steps_per_second": 0.344, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_qasc_pairs_loss": 8.005105018615723, |
|
"eval_qasc_pairs_runtime": 0.89, |
|
"eval_qasc_pairs_samples_per_second": 143.822, |
|
"eval_qasc_pairs_steps_per_second": 2.247, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_openbookqa_pairs_loss": 5.631566047668457, |
|
"eval_openbookqa_pairs_runtime": 0.861, |
|
"eval_openbookqa_pairs_samples_per_second": 148.656, |
|
"eval_openbookqa_pairs_steps_per_second": 2.323, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_msmarco_pairs_loss": 10.92141342163086, |
|
"eval_msmarco_pairs_runtime": 1.9495, |
|
"eval_msmarco_pairs_samples_per_second": 65.659, |
|
"eval_msmarco_pairs_steps_per_second": 1.026, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_nq_pairs_loss": 9.838966369628906, |
|
"eval_nq_pairs_runtime": 3.3145, |
|
"eval_nq_pairs_samples_per_second": 38.618, |
|
"eval_nq_pairs_steps_per_second": 0.603, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_trivia_pairs_loss": 7.321401596069336, |
|
"eval_trivia_pairs_runtime": 4.4673, |
|
"eval_trivia_pairs_samples_per_second": 28.653, |
|
"eval_trivia_pairs_steps_per_second": 0.448, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_gooaq_pairs_loss": 7.844480037689209, |
|
"eval_gooaq_pairs_runtime": 1.4628, |
|
"eval_gooaq_pairs_samples_per_second": 87.505, |
|
"eval_gooaq_pairs_steps_per_second": 1.367, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_paws-pos_loss": 2.474581718444824, |
|
"eval_paws-pos_runtime": 1.0227, |
|
"eval_paws-pos_samples_per_second": 125.159, |
|
"eval_paws-pos_steps_per_second": 1.956, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.015515903801396431, |
|
"eval_global_dataset_loss": 5.820813179016113, |
|
"eval_global_dataset_runtime": 19.3774, |
|
"eval_global_dataset_samples_per_second": 21.468, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.016291698991466253, |
|
"grad_norm": 16.32306671142578, |
|
"learning_rate": 7.81433607520564e-07, |
|
"loss": 6.3011, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.017067494181536073, |
|
"grad_norm": 10.361891746520996, |
|
"learning_rate": 8.225616921269095e-07, |
|
"loss": 5.6518, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.017843289371605897, |
|
"grad_norm": 18.645973205566406, |
|
"learning_rate": 8.636897767332549e-07, |
|
"loss": 6.5445, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.018619084561675717, |
|
"grad_norm": 14.296663284301758, |
|
"learning_rate": 9.048178613396004e-07, |
|
"loss": 6.0762, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.019394879751745538, |
|
"grad_norm": 12.288186073303223, |
|
"learning_rate": 9.459459459459459e-07, |
|
"loss": 4.9043, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02017067494181536, |
|
"grad_norm": 48.58110046386719, |
|
"learning_rate": 9.870740305522914e-07, |
|
"loss": 10.3442, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02094647013188518, |
|
"grad_norm": 17.945547103881836, |
|
"learning_rate": 1.0282021151586369e-06, |
|
"loss": 6.3305, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.021722265321955005, |
|
"grad_norm": 16.731460571289062, |
|
"learning_rate": 1.0693301997649822e-06, |
|
"loss": 6.3366, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.022498060512024826, |
|
"grad_norm": 10.825992584228516, |
|
"learning_rate": 1.1104582843713277e-06, |
|
"loss": 5.6012, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.023273855702094646, |
|
"grad_norm": 13.152975082397461, |
|
"learning_rate": 1.1515863689776732e-06, |
|
"loss": 5.9001, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02404965089216447, |
|
"grad_norm": 17.892440795898438, |
|
"learning_rate": 1.1927144535840187e-06, |
|
"loss": 6.3815, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02482544608223429, |
|
"grad_norm": 10.021342277526855, |
|
"learning_rate": 1.2338425381903642e-06, |
|
"loss": 4.8941, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02560124127230411, |
|
"grad_norm": 17.672340393066406, |
|
"learning_rate": 1.2749706227967097e-06, |
|
"loss": 6.2971, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.026377036462373934, |
|
"grad_norm": 13.532915115356445, |
|
"learning_rate": 1.316098707403055e-06, |
|
"loss": 5.6313, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.027152831652443754, |
|
"grad_norm": 13.363771438598633, |
|
"learning_rate": 1.3572267920094007e-06, |
|
"loss": 5.6635, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.027928626842513578, |
|
"grad_norm": 11.951986312866211, |
|
"learning_rate": 1.398354876615746e-06, |
|
"loss": 6.0632, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.028704422032583398, |
|
"grad_norm": 12.715692520141602, |
|
"learning_rate": 1.4394829612220915e-06, |
|
"loss": 6.0125, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.02948021722265322, |
|
"grad_norm": 44.77717208862305, |
|
"learning_rate": 1.4806110458284372e-06, |
|
"loss": 7.9108, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.030256012412723042, |
|
"grad_norm": 16.372045516967773, |
|
"learning_rate": 1.5217391304347825e-06, |
|
"loss": 6.3741, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"grad_norm": 10.656290054321289, |
|
"learning_rate": 1.562867215041128e-06, |
|
"loss": 5.3981, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_Qnli-dev_cosine_accuracy": 0.6015625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9430716633796692, |
|
"eval_Qnli-dev_cosine_ap": 0.5565875119399253, |
|
"eval_Qnli-dev_cosine_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.6778229475021362, |
|
"eval_Qnli-dev_cosine_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.548828125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 709.1221923828125, |
|
"eval_Qnli-dev_dot_ap": 0.47478000038083695, |
|
"eval_Qnli-dev_dot_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_dot_f1_threshold": 357.37890625, |
|
"eval_Qnli-dev_dot_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_dot_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.599609375, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 8.403730392456055, |
|
"eval_Qnli-dev_euclidean_ap": 0.5735439175412295, |
|
"eval_Qnli-dev_euclidean_f1": 0.6324324324324324, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 17.377689361572266, |
|
"eval_Qnli-dev_euclidean_precision": 0.4642857142857143, |
|
"eval_Qnli-dev_euclidean_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 152.123046875, |
|
"eval_Qnli-dev_manhattan_ap": 0.6110278070440811, |
|
"eval_Qnli-dev_manhattan_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 275.72344970703125, |
|
"eval_Qnli-dev_manhattan_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_manhattan_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_max_accuracy": 0.625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 709.1221923828125, |
|
"eval_Qnli-dev_max_ap": 0.6110278070440811, |
|
"eval_Qnli-dev_max_f1": 0.6324324324324324, |
|
"eval_Qnli-dev_max_f1_threshold": 357.37890625, |
|
"eval_Qnli-dev_max_precision": 0.4642857142857143, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.666015625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9908591508865356, |
|
"eval_allNLI-dev_cosine_ap": 0.3677090943014154, |
|
"eval_allNLI-dev_cosine_f1": 0.5088235294117648, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8096739053726196, |
|
"eval_allNLI-dev_cosine_precision": 0.34122287968441817, |
|
"eval_allNLI-dev_cosine_recall": 1.0, |
|
"eval_allNLI-dev_dot_accuracy": 0.66015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 1065.9342041015625, |
|
"eval_allNLI-dev_dot_ap": 0.33310258182405583, |
|
"eval_allNLI-dev_dot_f1": 0.5081240768094535, |
|
"eval_allNLI-dev_dot_f1_threshold": 600.9107666015625, |
|
"eval_allNLI-dev_dot_precision": 0.3412698412698413, |
|
"eval_allNLI-dev_dot_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.249004364013672, |
|
"eval_allNLI-dev_euclidean_ap": 0.36405301502121135, |
|
"eval_allNLI-dev_euclidean_f1": 0.5103857566765578, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 16.79437828063965, |
|
"eval_allNLI-dev_euclidean_precision": 0.34331337325349304, |
|
"eval_allNLI-dev_euclidean_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.666015625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 71.94633483886719, |
|
"eval_allNLI-dev_manhattan_ap": 0.3767657897123271, |
|
"eval_allNLI-dev_manhattan_f1": 0.5073746312684366, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 236.0598602294922, |
|
"eval_allNLI-dev_manhattan_precision": 0.3405940594059406, |
|
"eval_allNLI-dev_manhattan_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_max_accuracy": 0.666015625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 1065.9342041015625, |
|
"eval_allNLI-dev_max_ap": 0.3767657897123271, |
|
"eval_allNLI-dev_max_f1": 0.5103857566765578, |
|
"eval_allNLI-dev_max_f1_threshold": 600.9107666015625, |
|
"eval_allNLI-dev_max_precision": 0.34331337325349304, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.6110278070440811, |
|
"eval_sts-test_pearson_cosine": 0.14752508082997276, |
|
"eval_sts-test_pearson_dot": 0.13488717804142986, |
|
"eval_sts-test_pearson_euclidean": 0.15185506014664446, |
|
"eval_sts-test_pearson_manhattan": 0.14687917919770682, |
|
"eval_sts-test_pearson_max": 0.15185506014664446, |
|
"eval_sts-test_spearman_cosine": 0.20049724161017018, |
|
"eval_sts-test_spearman_dot": 0.1353592298330889, |
|
"eval_sts-test_spearman_euclidean": 0.17951910330520107, |
|
"eval_sts-test_spearman_manhattan": 0.17376785019622396, |
|
"eval_sts-test_spearman_max": 0.20049724161017018, |
|
"eval_vitaminc-pairs_loss": 4.123702049255371, |
|
"eval_vitaminc-pairs_runtime": 5.4736, |
|
"eval_vitaminc-pairs_samples_per_second": 23.385, |
|
"eval_vitaminc-pairs_steps_per_second": 0.365, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_negation-triplets_loss": 5.45994758605957, |
|
"eval_negation-triplets_runtime": 1.0747, |
|
"eval_negation-triplets_samples_per_second": 119.102, |
|
"eval_negation-triplets_steps_per_second": 1.861, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_scitail-pairs-pos_loss": 2.758348226547241, |
|
"eval_scitail-pairs-pos_runtime": 1.2143, |
|
"eval_scitail-pairs-pos_samples_per_second": 105.41, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.647, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_scitail-pairs-qa_loss": 4.180817127227783, |
|
"eval_scitail-pairs-qa_runtime": 0.766, |
|
"eval_scitail-pairs-qa_samples_per_second": 167.105, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.611, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_xsum-pairs_loss": 5.874823093414307, |
|
"eval_xsum-pairs_runtime": 3.924, |
|
"eval_xsum-pairs_samples_per_second": 32.62, |
|
"eval_xsum-pairs_steps_per_second": 0.51, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_sciq_pairs_loss": 0.6976423859596252, |
|
"eval_sciq_pairs_runtime": 5.8211, |
|
"eval_sciq_pairs_samples_per_second": 21.989, |
|
"eval_sciq_pairs_steps_per_second": 0.344, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_qasc_pairs_loss": 7.423002243041992, |
|
"eval_qasc_pairs_runtime": 0.9034, |
|
"eval_qasc_pairs_samples_per_second": 141.693, |
|
"eval_qasc_pairs_steps_per_second": 2.214, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_openbookqa_pairs_loss": 5.564730644226074, |
|
"eval_openbookqa_pairs_runtime": 0.8569, |
|
"eval_openbookqa_pairs_samples_per_second": 149.376, |
|
"eval_openbookqa_pairs_steps_per_second": 2.334, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_msmarco_pairs_loss": 10.023717880249023, |
|
"eval_msmarco_pairs_runtime": 1.9602, |
|
"eval_msmarco_pairs_samples_per_second": 65.298, |
|
"eval_msmarco_pairs_steps_per_second": 1.02, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_nq_pairs_loss": 8.906123161315918, |
|
"eval_nq_pairs_runtime": 3.3287, |
|
"eval_nq_pairs_samples_per_second": 38.453, |
|
"eval_nq_pairs_steps_per_second": 0.601, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_trivia_pairs_loss": 6.981179714202881, |
|
"eval_trivia_pairs_runtime": 4.4701, |
|
"eval_trivia_pairs_samples_per_second": 28.635, |
|
"eval_trivia_pairs_steps_per_second": 0.447, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_gooaq_pairs_loss": 7.387185096740723, |
|
"eval_gooaq_pairs_runtime": 1.453, |
|
"eval_gooaq_pairs_samples_per_second": 88.091, |
|
"eval_gooaq_pairs_steps_per_second": 1.376, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_paws-pos_loss": 2.5774118900299072, |
|
"eval_paws-pos_runtime": 1.0228, |
|
"eval_paws-pos_samples_per_second": 125.149, |
|
"eval_paws-pos_steps_per_second": 1.955, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.031031807602792862, |
|
"eval_global_dataset_loss": 5.574436187744141, |
|
"eval_global_dataset_runtime": 19.3888, |
|
"eval_global_dataset_samples_per_second": 21.456, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03180760279286268, |
|
"grad_norm": 10.962615966796875, |
|
"learning_rate": 1.6039952996474733e-06, |
|
"loss": 5.3344, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.032583397982932506, |
|
"grad_norm": 14.594836235046387, |
|
"learning_rate": 1.645123384253819e-06, |
|
"loss": 6.1365, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03335919317300233, |
|
"grad_norm": 11.795042037963867, |
|
"learning_rate": 1.6862514688601645e-06, |
|
"loss": 5.818, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03413498836307215, |
|
"grad_norm": 12.332319259643555, |
|
"learning_rate": 1.7273795534665098e-06, |
|
"loss": 5.8133, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03491078355314197, |
|
"grad_norm": 14.156538963317871, |
|
"learning_rate": 1.7685076380728553e-06, |
|
"loss": 5.9534, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.035686578743211794, |
|
"grad_norm": 11.36040210723877, |
|
"learning_rate": 1.8096357226792008e-06, |
|
"loss": 5.6302, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.03646237393328161, |
|
"grad_norm": 12.837271690368652, |
|
"learning_rate": 1.8507638072855463e-06, |
|
"loss": 5.9389, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.037238169123351435, |
|
"grad_norm": 13.556758880615234, |
|
"learning_rate": 1.8918918918918918e-06, |
|
"loss": 5.7893, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03801396431342126, |
|
"grad_norm": 9.904508590698242, |
|
"learning_rate": 1.933019976498237e-06, |
|
"loss": 4.725, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.038789759503491075, |
|
"grad_norm": 13.982979774475098, |
|
"learning_rate": 1.9741480611045828e-06, |
|
"loss": 6.1877, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0395655546935609, |
|
"grad_norm": 13.776800155639648, |
|
"learning_rate": 2.015276145710928e-06, |
|
"loss": 6.001, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04034134988363072, |
|
"grad_norm": 21.959096908569336, |
|
"learning_rate": 2.0564042303172738e-06, |
|
"loss": 7.0504, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.041117145073700546, |
|
"grad_norm": 14.812617301940918, |
|
"learning_rate": 2.097532314923619e-06, |
|
"loss": 5.7706, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.04189294026377036, |
|
"grad_norm": 13.54603099822998, |
|
"learning_rate": 2.1386603995299644e-06, |
|
"loss": 6.3886, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.04266873545384019, |
|
"grad_norm": 11.500264167785645, |
|
"learning_rate": 2.17978848413631e-06, |
|
"loss": 6.0503, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04344453064391001, |
|
"grad_norm": 7.893326759338379, |
|
"learning_rate": 2.2209165687426554e-06, |
|
"loss": 5.9136, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.04422032583397983, |
|
"grad_norm": 9.733207702636719, |
|
"learning_rate": 2.2620446533490006e-06, |
|
"loss": 5.6769, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.04499612102404965, |
|
"grad_norm": 12.644614219665527, |
|
"learning_rate": 2.3031727379553464e-06, |
|
"loss": 5.6418, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.045771916214119475, |
|
"grad_norm": 8.392154693603516, |
|
"learning_rate": 2.344300822561692e-06, |
|
"loss": 5.3225, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"grad_norm": 6.115021228790283, |
|
"learning_rate": 2.3854289071680374e-06, |
|
"loss": 5.2977, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_Qnli-dev_cosine_accuracy": 0.59765625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9555542469024658, |
|
"eval_Qnli-dev_cosine_ap": 0.5645675307780116, |
|
"eval_Qnli-dev_cosine_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7179017066955566, |
|
"eval_Qnli-dev_cosine_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.55078125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 727.389404296875, |
|
"eval_Qnli-dev_dot_ap": 0.47243395169886154, |
|
"eval_Qnli-dev_dot_f1": 0.629878869448183, |
|
"eval_Qnli-dev_dot_f1_threshold": 461.4835205078125, |
|
"eval_Qnli-dev_dot_precision": 0.46153846153846156, |
|
"eval_Qnli-dev_dot_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.599609375, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 8.283002853393555, |
|
"eval_Qnli-dev_euclidean_ap": 0.5826604533188524, |
|
"eval_Qnli-dev_euclidean_f1": 0.6314363143631436, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 16.081790924072266, |
|
"eval_Qnli-dev_euclidean_precision": 0.4641434262948207, |
|
"eval_Qnli-dev_euclidean_recall": 0.9872881355932204, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.630859375, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 146.17575073242188, |
|
"eval_Qnli-dev_manhattan_ap": 0.6168306227629134, |
|
"eval_Qnli-dev_manhattan_f1": 0.6296296296296297, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 204.5635528564453, |
|
"eval_Qnli-dev_manhattan_precision": 0.4742489270386266, |
|
"eval_Qnli-dev_manhattan_recall": 0.9364406779661016, |
|
"eval_Qnli-dev_max_accuracy": 0.630859375, |
|
"eval_Qnli-dev_max_accuracy_threshold": 727.389404296875, |
|
"eval_Qnli-dev_max_ap": 0.6168306227629134, |
|
"eval_Qnli-dev_max_f1": 0.6314363143631436, |
|
"eval_Qnli-dev_max_f1_threshold": 461.4835205078125, |
|
"eval_Qnli-dev_max_precision": 0.4742489270386266, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.666015625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9849413633346558, |
|
"eval_allNLI-dev_cosine_ap": 0.37307202329042877, |
|
"eval_allNLI-dev_cosine_f1": 0.5104477611940298, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8623183965682983, |
|
"eval_allNLI-dev_cosine_precision": 0.3440643863179074, |
|
"eval_allNLI-dev_cosine_recall": 0.9884393063583815, |
|
"eval_allNLI-dev_dot_accuracy": 0.66015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 1062.3370361328125, |
|
"eval_allNLI-dev_dot_ap": 0.3297699984610426, |
|
"eval_allNLI-dev_dot_f1": 0.5088757396449705, |
|
"eval_allNLI-dev_dot_f1_threshold": 622.9761962890625, |
|
"eval_allNLI-dev_dot_precision": 0.341948310139165, |
|
"eval_allNLI-dev_dot_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.127579689025879, |
|
"eval_allNLI-dev_euclidean_ap": 0.36909730374104466, |
|
"eval_allNLI-dev_euclidean_f1": 0.5113464447806354, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 14.735725402832031, |
|
"eval_allNLI-dev_euclidean_precision": 0.3463114754098361, |
|
"eval_allNLI-dev_euclidean_recall": 0.976878612716763, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 54.84022521972656, |
|
"eval_allNLI-dev_manhattan_ap": 0.3817636902039587, |
|
"eval_allNLI-dev_manhattan_f1": 0.5103857566765578, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 225.62132263183594, |
|
"eval_allNLI-dev_manhattan_precision": 0.34331337325349304, |
|
"eval_allNLI-dev_manhattan_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_max_accuracy": 0.666015625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 1062.3370361328125, |
|
"eval_allNLI-dev_max_ap": 0.3817636902039587, |
|
"eval_allNLI-dev_max_f1": 0.5113464447806354, |
|
"eval_allNLI-dev_max_f1_threshold": 622.9761962890625, |
|
"eval_allNLI-dev_max_precision": 0.3463114754098361, |
|
"eval_allNLI-dev_max_recall": 0.9942196531791907, |
|
"eval_sequential_score": 0.6168306227629134, |
|
"eval_sts-test_pearson_cosine": 0.15615571514608637, |
|
"eval_sts-test_pearson_dot": 0.1565591340193878, |
|
"eval_sts-test_pearson_euclidean": 0.15789005051166094, |
|
"eval_sts-test_pearson_manhattan": 0.15049180567530787, |
|
"eval_sts-test_pearson_max": 0.15789005051166094, |
|
"eval_sts-test_spearman_cosine": 0.21185036811759986, |
|
"eval_sts-test_spearman_dot": 0.15568507375201698, |
|
"eval_sts-test_spearman_euclidean": 0.1835967487406626, |
|
"eval_sts-test_spearman_manhattan": 0.17528196437414056, |
|
"eval_sts-test_spearman_max": 0.21185036811759986, |
|
"eval_vitaminc-pairs_loss": 3.992605209350586, |
|
"eval_vitaminc-pairs_runtime": 5.4384, |
|
"eval_vitaminc-pairs_samples_per_second": 23.536, |
|
"eval_vitaminc-pairs_steps_per_second": 0.368, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_negation-triplets_loss": 5.40188455581665, |
|
"eval_negation-triplets_runtime": 1.0636, |
|
"eval_negation-triplets_samples_per_second": 120.348, |
|
"eval_negation-triplets_steps_per_second": 1.88, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_scitail-pairs-pos_loss": 2.6962366104125977, |
|
"eval_scitail-pairs-pos_runtime": 1.1899, |
|
"eval_scitail-pairs-pos_samples_per_second": 107.569, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.681, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_scitail-pairs-qa_loss": 4.189697742462158, |
|
"eval_scitail-pairs-qa_runtime": 0.7567, |
|
"eval_scitail-pairs-qa_samples_per_second": 169.153, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.643, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_xsum-pairs_loss": 5.7225446701049805, |
|
"eval_xsum-pairs_runtime": 3.9013, |
|
"eval_xsum-pairs_samples_per_second": 32.809, |
|
"eval_xsum-pairs_steps_per_second": 0.513, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_sciq_pairs_loss": 0.6581735610961914, |
|
"eval_sciq_pairs_runtime": 5.7872, |
|
"eval_sciq_pairs_samples_per_second": 22.118, |
|
"eval_sciq_pairs_steps_per_second": 0.346, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_qasc_pairs_loss": 6.56049919128418, |
|
"eval_qasc_pairs_runtime": 0.8954, |
|
"eval_qasc_pairs_samples_per_second": 142.954, |
|
"eval_qasc_pairs_steps_per_second": 2.234, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_openbookqa_pairs_loss": 5.458502292633057, |
|
"eval_openbookqa_pairs_runtime": 0.8618, |
|
"eval_openbookqa_pairs_samples_per_second": 148.532, |
|
"eval_openbookqa_pairs_steps_per_second": 2.321, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_msmarco_pairs_loss": 8.660462379455566, |
|
"eval_msmarco_pairs_runtime": 1.9838, |
|
"eval_msmarco_pairs_samples_per_second": 64.521, |
|
"eval_msmarco_pairs_steps_per_second": 1.008, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_nq_pairs_loss": 7.642305850982666, |
|
"eval_nq_pairs_runtime": 3.3253, |
|
"eval_nq_pairs_samples_per_second": 38.492, |
|
"eval_nq_pairs_steps_per_second": 0.601, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_trivia_pairs_loss": 6.512197494506836, |
|
"eval_trivia_pairs_runtime": 4.4671, |
|
"eval_trivia_pairs_samples_per_second": 28.654, |
|
"eval_trivia_pairs_steps_per_second": 0.448, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_gooaq_pairs_loss": 6.7339911460876465, |
|
"eval_gooaq_pairs_runtime": 1.4577, |
|
"eval_gooaq_pairs_samples_per_second": 87.81, |
|
"eval_gooaq_pairs_steps_per_second": 1.372, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_paws-pos_loss": 2.735227584838867, |
|
"eval_paws-pos_runtime": 1.0334, |
|
"eval_paws-pos_samples_per_second": 123.864, |
|
"eval_paws-pos_steps_per_second": 1.935, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04654771140418929, |
|
"eval_global_dataset_loss": 5.233480453491211, |
|
"eval_global_dataset_runtime": 19.3785, |
|
"eval_global_dataset_samples_per_second": 21.467, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.047323506594259115, |
|
"grad_norm": 17.255535125732422, |
|
"learning_rate": 2.426556991774383e-06, |
|
"loss": 3.5938, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.04809930178432894, |
|
"grad_norm": 8.553607940673828, |
|
"learning_rate": 2.4676850763807284e-06, |
|
"loss": 6.1306, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.048875096974398756, |
|
"grad_norm": 29.715600967407227, |
|
"learning_rate": 2.5088131609870737e-06, |
|
"loss": 8.328, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.04965089216446858, |
|
"grad_norm": 11.584660530090332, |
|
"learning_rate": 2.5499412455934194e-06, |
|
"loss": 6.0765, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.0504266873545384, |
|
"grad_norm": 9.648879051208496, |
|
"learning_rate": 2.5910693301997647e-06, |
|
"loss": 4.808, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.05120248254460822, |
|
"grad_norm": 10.802507400512695, |
|
"learning_rate": 2.63219741480611e-06, |
|
"loss": 5.9628, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.051978277734678044, |
|
"grad_norm": 8.42625904083252, |
|
"learning_rate": 2.6733254994124557e-06, |
|
"loss": 5.197, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.05275407292474787, |
|
"grad_norm": 18.369491577148438, |
|
"learning_rate": 2.7144535840188014e-06, |
|
"loss": 6.7851, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.05352986811481769, |
|
"grad_norm": 16.186237335205078, |
|
"learning_rate": 2.7555816686251467e-06, |
|
"loss": 3.5248, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.05430566330488751, |
|
"grad_norm": 25.963136672973633, |
|
"learning_rate": 2.796709753231492e-06, |
|
"loss": 7.614, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.05508145849495733, |
|
"grad_norm": 10.458888053894043, |
|
"learning_rate": 2.8378378378378377e-06, |
|
"loss": 5.9919, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.055857253685027156, |
|
"grad_norm": 22.169750213623047, |
|
"learning_rate": 2.878965922444183e-06, |
|
"loss": 6.5741, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.05663304887509697, |
|
"grad_norm": 8.526782989501953, |
|
"learning_rate": 2.9200940070505282e-06, |
|
"loss": 5.5377, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.057408844065166796, |
|
"grad_norm": 17.269418716430664, |
|
"learning_rate": 2.9612220916568744e-06, |
|
"loss": 6.6046, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.05818463925523662, |
|
"grad_norm": 25.390901565551758, |
|
"learning_rate": 3.0023501762632197e-06, |
|
"loss": 6.6433, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.05896043444530644, |
|
"grad_norm": 9.188263893127441, |
|
"learning_rate": 3.043478260869565e-06, |
|
"loss": 5.347, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.05973622963537626, |
|
"grad_norm": 19.360456466674805, |
|
"learning_rate": 3.0846063454759102e-06, |
|
"loss": 6.3361, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.060512024825446084, |
|
"grad_norm": 16.59157371520996, |
|
"learning_rate": 3.125734430082256e-06, |
|
"loss": 6.672, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.0612878200155159, |
|
"grad_norm": 21.274341583251953, |
|
"learning_rate": 3.1668625146886012e-06, |
|
"loss": 7.2266, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"grad_norm": 7.30432653427124, |
|
"learning_rate": 3.2079905992949465e-06, |
|
"loss": 5.2962, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_Qnli-dev_cosine_accuracy": 0.609375, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9602965116500854, |
|
"eval_Qnli-dev_cosine_ap": 0.5666608532248261, |
|
"eval_Qnli-dev_cosine_f1": 0.6300268096514745, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.8070105314254761, |
|
"eval_Qnli-dev_cosine_precision": 0.46078431372549017, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.55078125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 752.1351318359375, |
|
"eval_Qnli-dev_dot_ap": 0.47532727039762024, |
|
"eval_Qnli-dev_dot_f1": 0.6307277628032345, |
|
"eval_Qnli-dev_dot_f1_threshold": 492.98980712890625, |
|
"eval_Qnli-dev_dot_precision": 0.4624505928853755, |
|
"eval_Qnli-dev_dot_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.609375, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 7.72648811340332, |
|
"eval_Qnli-dev_euclidean_ap": 0.5815079822504383, |
|
"eval_Qnli-dev_euclidean_f1": 0.6300268096514745, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 16.727632522583008, |
|
"eval_Qnli-dev_euclidean_precision": 0.46078431372549017, |
|
"eval_Qnli-dev_euclidean_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 138.83407592773438, |
|
"eval_Qnli-dev_manhattan_ap": 0.6149910998453122, |
|
"eval_Qnli-dev_manhattan_f1": 0.6338028169014084, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 200.78419494628906, |
|
"eval_Qnli-dev_manhattan_precision": 0.47468354430379744, |
|
"eval_Qnli-dev_manhattan_recall": 0.9533898305084746, |
|
"eval_Qnli-dev_max_accuracy": 0.625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 752.1351318359375, |
|
"eval_Qnli-dev_max_ap": 0.6149910998453122, |
|
"eval_Qnli-dev_max_f1": 0.6338028169014084, |
|
"eval_Qnli-dev_max_f1_threshold": 492.98980712890625, |
|
"eval_Qnli-dev_max_precision": 0.47468354430379744, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.66796875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9806065559387207, |
|
"eval_allNLI-dev_cosine_ap": 0.3777500660247708, |
|
"eval_allNLI-dev_cosine_f1": 0.5105105105105106, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8768577575683594, |
|
"eval_allNLI-dev_cosine_precision": 0.3448275862068966, |
|
"eval_allNLI-dev_cosine_recall": 0.9826589595375722, |
|
"eval_allNLI-dev_dot_accuracy": 0.66015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 1061.25537109375, |
|
"eval_allNLI-dev_dot_ap": 0.32808659027780296, |
|
"eval_allNLI-dev_dot_f1": 0.5103244837758112, |
|
"eval_allNLI-dev_dot_f1_threshold": 628.6370239257812, |
|
"eval_allNLI-dev_dot_precision": 0.3425742574257426, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.6640625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 2.8695411682128906, |
|
"eval_allNLI-dev_euclidean_ap": 0.3719354048335051, |
|
"eval_allNLI-dev_euclidean_f1": 0.5098634294385432, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 14.270792007446289, |
|
"eval_allNLI-dev_euclidean_precision": 0.345679012345679, |
|
"eval_allNLI-dev_euclidean_recall": 0.9710982658959537, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 52.462921142578125, |
|
"eval_allNLI-dev_manhattan_ap": 0.385436980166003, |
|
"eval_allNLI-dev_manhattan_f1": 0.5121212121212121, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 210.9059295654297, |
|
"eval_allNLI-dev_manhattan_precision": 0.3470225872689938, |
|
"eval_allNLI-dev_manhattan_recall": 0.976878612716763, |
|
"eval_allNLI-dev_max_accuracy": 0.66796875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 1061.25537109375, |
|
"eval_allNLI-dev_max_ap": 0.385436980166003, |
|
"eval_allNLI-dev_max_f1": 0.5121212121212121, |
|
"eval_allNLI-dev_max_f1_threshold": 628.6370239257812, |
|
"eval_allNLI-dev_max_precision": 0.3470225872689938, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.6149910998453122, |
|
"eval_sts-test_pearson_cosine": 0.16481809744211354, |
|
"eval_sts-test_pearson_dot": 0.1687336929448065, |
|
"eval_sts-test_pearson_euclidean": 0.16621273435380343, |
|
"eval_sts-test_pearson_manhattan": 0.15708095069304162, |
|
"eval_sts-test_pearson_max": 0.1687336929448065, |
|
"eval_sts-test_spearman_cosine": 0.22067965773633885, |
|
"eval_sts-test_spearman_dot": 0.16797399388640222, |
|
"eval_sts-test_spearman_euclidean": 0.19304745437508256, |
|
"eval_sts-test_spearman_manhattan": 0.18393475501795184, |
|
"eval_sts-test_spearman_max": 0.22067965773633885, |
|
"eval_vitaminc-pairs_loss": 3.9390244483947754, |
|
"eval_vitaminc-pairs_runtime": 5.4765, |
|
"eval_vitaminc-pairs_samples_per_second": 23.373, |
|
"eval_vitaminc-pairs_steps_per_second": 0.365, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_negation-triplets_loss": 5.378707408905029, |
|
"eval_negation-triplets_runtime": 1.0807, |
|
"eval_negation-triplets_samples_per_second": 118.445, |
|
"eval_negation-triplets_steps_per_second": 1.851, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_scitail-pairs-pos_loss": 2.6731348037719727, |
|
"eval_scitail-pairs-pos_runtime": 1.2466, |
|
"eval_scitail-pairs-pos_samples_per_second": 102.681, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.604, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_scitail-pairs-qa_loss": 4.19725227355957, |
|
"eval_scitail-pairs-qa_runtime": 0.7667, |
|
"eval_scitail-pairs-qa_samples_per_second": 166.945, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.609, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_xsum-pairs_loss": 5.497353553771973, |
|
"eval_xsum-pairs_runtime": 3.9143, |
|
"eval_xsum-pairs_samples_per_second": 32.701, |
|
"eval_xsum-pairs_steps_per_second": 0.511, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_sciq_pairs_loss": 0.6366308927536011, |
|
"eval_sciq_pairs_runtime": 5.8407, |
|
"eval_sciq_pairs_samples_per_second": 21.915, |
|
"eval_sciq_pairs_steps_per_second": 0.342, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_qasc_pairs_loss": 5.937916278839111, |
|
"eval_qasc_pairs_runtime": 0.9043, |
|
"eval_qasc_pairs_samples_per_second": 141.552, |
|
"eval_qasc_pairs_steps_per_second": 2.212, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_openbookqa_pairs_loss": 5.383679389953613, |
|
"eval_openbookqa_pairs_runtime": 0.8569, |
|
"eval_openbookqa_pairs_samples_per_second": 149.38, |
|
"eval_openbookqa_pairs_steps_per_second": 2.334, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_msmarco_pairs_loss": 7.691795825958252, |
|
"eval_msmarco_pairs_runtime": 1.9631, |
|
"eval_msmarco_pairs_samples_per_second": 65.203, |
|
"eval_msmarco_pairs_steps_per_second": 1.019, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_nq_pairs_loss": 6.876422882080078, |
|
"eval_nq_pairs_runtime": 3.3205, |
|
"eval_nq_pairs_samples_per_second": 38.549, |
|
"eval_nq_pairs_steps_per_second": 0.602, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_trivia_pairs_loss": 6.1294965744018555, |
|
"eval_trivia_pairs_runtime": 4.4744, |
|
"eval_trivia_pairs_samples_per_second": 28.607, |
|
"eval_trivia_pairs_steps_per_second": 0.447, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_gooaq_pairs_loss": 6.205443859100342, |
|
"eval_gooaq_pairs_runtime": 1.4558, |
|
"eval_gooaq_pairs_samples_per_second": 87.923, |
|
"eval_gooaq_pairs_steps_per_second": 1.374, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_paws-pos_loss": 2.7808141708374023, |
|
"eval_paws-pos_runtime": 1.0141, |
|
"eval_paws-pos_samples_per_second": 126.225, |
|
"eval_paws-pos_steps_per_second": 1.972, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.062063615205585725, |
|
"eval_global_dataset_loss": 5.0193328857421875, |
|
"eval_global_dataset_runtime": 19.3707, |
|
"eval_global_dataset_samples_per_second": 21.476, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.06283941039565555, |
|
"grad_norm": 17.17608070373535, |
|
"learning_rate": 3.2491186839012927e-06, |
|
"loss": 6.1576, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.06361520558572537, |
|
"grad_norm": 22.93490982055664, |
|
"learning_rate": 3.290246768507638e-06, |
|
"loss": 7.3243, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.0643910007757952, |
|
"grad_norm": 7.930976867675781, |
|
"learning_rate": 3.3313748531139832e-06, |
|
"loss": 5.4146, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.06516679596586501, |
|
"grad_norm": 9.221315383911133, |
|
"learning_rate": 3.372502937720329e-06, |
|
"loss": 5.8422, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.06594259115593483, |
|
"grad_norm": 16.351512908935547, |
|
"learning_rate": 3.4136310223266742e-06, |
|
"loss": 3.7286, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.06671838634600466, |
|
"grad_norm": 6.853544235229492, |
|
"learning_rate": 3.4547591069330195e-06, |
|
"loss": 5.478, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.06749418153607448, |
|
"grad_norm": 7.5860700607299805, |
|
"learning_rate": 3.495887191539365e-06, |
|
"loss": 5.7321, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.0682699767261443, |
|
"grad_norm": 16.70624351501465, |
|
"learning_rate": 3.5370152761457105e-06, |
|
"loss": 3.6133, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.06904577191621412, |
|
"grad_norm": 20.827497482299805, |
|
"learning_rate": 3.5781433607520563e-06, |
|
"loss": 7.1329, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.06982156710628394, |
|
"grad_norm": 22.313615798950195, |
|
"learning_rate": 3.6192714453584015e-06, |
|
"loss": 7.1766, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07059736229635376, |
|
"grad_norm": 12.5264892578125, |
|
"learning_rate": 3.6603995299647473e-06, |
|
"loss": 5.9805, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.07137315748642359, |
|
"grad_norm": 5.0581955909729, |
|
"learning_rate": 3.7015276145710925e-06, |
|
"loss": 5.797, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.0721489526764934, |
|
"grad_norm": 11.093539237976074, |
|
"learning_rate": 3.742655699177438e-06, |
|
"loss": 6.2104, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.07292474786656322, |
|
"grad_norm": 18.35002326965332, |
|
"learning_rate": 3.7837837837837835e-06, |
|
"loss": 6.7904, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.07370054305663305, |
|
"grad_norm": 4.62692928314209, |
|
"learning_rate": 3.824911868390129e-06, |
|
"loss": 5.6134, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.07447633824670287, |
|
"grad_norm": 5.769094944000244, |
|
"learning_rate": 3.866039952996474e-06, |
|
"loss": 5.5922, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.07525213343677269, |
|
"grad_norm": 6.5628461837768555, |
|
"learning_rate": 3.907168037602819e-06, |
|
"loss": 5.0351, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.07602792862684252, |
|
"grad_norm": 18.365070343017578, |
|
"learning_rate": 3.9482961222091655e-06, |
|
"loss": 7.0271, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.07680372381691233, |
|
"grad_norm": 12.308566093444824, |
|
"learning_rate": 3.989424206815511e-06, |
|
"loss": 6.2418, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"grad_norm": 15.452849388122559, |
|
"learning_rate": 4.030552291421856e-06, |
|
"loss": 6.6114, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_Qnli-dev_cosine_accuracy": 0.6015625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9667960405349731, |
|
"eval_Qnli-dev_cosine_ap": 0.5703877819293376, |
|
"eval_Qnli-dev_cosine_f1": 0.6317204301075269, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.8393322229385376, |
|
"eval_Qnli-dev_cosine_precision": 0.4625984251968504, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.546875, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 798.5418701171875, |
|
"eval_Qnli-dev_dot_ap": 0.47597628614259624, |
|
"eval_Qnli-dev_dot_f1": 0.6307277628032345, |
|
"eval_Qnli-dev_dot_f1_threshold": 552.041015625, |
|
"eval_Qnli-dev_dot_precision": 0.4624505928853755, |
|
"eval_Qnli-dev_dot_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.603515625, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 7.820992469787598, |
|
"eval_Qnli-dev_euclidean_ap": 0.5870442553034873, |
|
"eval_Qnli-dev_euclidean_f1": 0.6300268096514745, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 16.982101440429688, |
|
"eval_Qnli-dev_euclidean_precision": 0.46078431372549017, |
|
"eval_Qnli-dev_euclidean_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.62109375, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 138.17556762695312, |
|
"eval_Qnli-dev_manhattan_ap": 0.613470320794373, |
|
"eval_Qnli-dev_manhattan_f1": 0.6304347826086957, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 210.9376220703125, |
|
"eval_Qnli-dev_manhattan_precision": 0.464, |
|
"eval_Qnli-dev_manhattan_recall": 0.9830508474576272, |
|
"eval_Qnli-dev_max_accuracy": 0.62109375, |
|
"eval_Qnli-dev_max_accuracy_threshold": 798.5418701171875, |
|
"eval_Qnli-dev_max_ap": 0.613470320794373, |
|
"eval_Qnli-dev_max_f1": 0.6317204301075269, |
|
"eval_Qnli-dev_max_f1_threshold": 552.041015625, |
|
"eval_Qnli-dev_max_precision": 0.464, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.669921875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9819352626800537, |
|
"eval_allNLI-dev_cosine_ap": 0.3832808530001383, |
|
"eval_allNLI-dev_cosine_f1": 0.5082706766917294, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8935015201568604, |
|
"eval_allNLI-dev_cosine_precision": 0.3434959349593496, |
|
"eval_allNLI-dev_cosine_recall": 0.976878612716763, |
|
"eval_allNLI-dev_dot_accuracy": 0.66015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 1097.2392578125, |
|
"eval_allNLI-dev_dot_ap": 0.32480014044358735, |
|
"eval_allNLI-dev_dot_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_dot_f1_threshold": 669.5403442382812, |
|
"eval_allNLI-dev_dot_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.188655376434326, |
|
"eval_allNLI-dev_euclidean_ap": 0.3752436345647485, |
|
"eval_allNLI-dev_euclidean_f1": 0.5112781954887218, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 14.686256408691406, |
|
"eval_allNLI-dev_euclidean_precision": 0.34552845528455284, |
|
"eval_allNLI-dev_euclidean_recall": 0.9826589595375722, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.66796875, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 81.46263122558594, |
|
"eval_allNLI-dev_manhattan_ap": 0.39261471479691196, |
|
"eval_allNLI-dev_manhattan_f1": 0.513677811550152, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 204.65167236328125, |
|
"eval_allNLI-dev_manhattan_precision": 0.34845360824742266, |
|
"eval_allNLI-dev_manhattan_recall": 0.976878612716763, |
|
"eval_allNLI-dev_max_accuracy": 0.669921875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 1097.2392578125, |
|
"eval_allNLI-dev_max_ap": 0.39261471479691196, |
|
"eval_allNLI-dev_max_f1": 0.513677811550152, |
|
"eval_allNLI-dev_max_f1_threshold": 669.5403442382812, |
|
"eval_allNLI-dev_max_precision": 0.34845360824742266, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.613470320794373, |
|
"eval_sts-test_pearson_cosine": 0.1720525202149525, |
|
"eval_sts-test_pearson_dot": 0.17715667152519826, |
|
"eval_sts-test_pearson_euclidean": 0.17331039588378508, |
|
"eval_sts-test_pearson_manhattan": 0.16267378650643669, |
|
"eval_sts-test_pearson_max": 0.17715667152519826, |
|
"eval_sts-test_spearman_cosine": 0.2278064303667034, |
|
"eval_sts-test_spearman_dot": 0.17562364025486388, |
|
"eval_sts-test_spearman_euclidean": 0.19856242082391304, |
|
"eval_sts-test_spearman_manhattan": 0.1882869494420496, |
|
"eval_sts-test_spearman_max": 0.2278064303667034, |
|
"eval_vitaminc-pairs_loss": 3.920011281967163, |
|
"eval_vitaminc-pairs_runtime": 5.4953, |
|
"eval_vitaminc-pairs_samples_per_second": 23.293, |
|
"eval_vitaminc-pairs_steps_per_second": 0.364, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_negation-triplets_loss": 5.354557991027832, |
|
"eval_negation-triplets_runtime": 1.0836, |
|
"eval_negation-triplets_samples_per_second": 118.12, |
|
"eval_negation-triplets_steps_per_second": 1.846, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_scitail-pairs-pos_loss": 2.650925636291504, |
|
"eval_scitail-pairs-pos_runtime": 1.2361, |
|
"eval_scitail-pairs-pos_samples_per_second": 103.552, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.618, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_scitail-pairs-qa_loss": 4.260052680969238, |
|
"eval_scitail-pairs-qa_runtime": 0.7647, |
|
"eval_scitail-pairs-qa_samples_per_second": 167.375, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.615, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_xsum-pairs_loss": 5.345217227935791, |
|
"eval_xsum-pairs_runtime": 3.917, |
|
"eval_xsum-pairs_samples_per_second": 32.678, |
|
"eval_xsum-pairs_steps_per_second": 0.511, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_sciq_pairs_loss": 0.6231272220611572, |
|
"eval_sciq_pairs_runtime": 5.8736, |
|
"eval_sciq_pairs_samples_per_second": 21.793, |
|
"eval_sciq_pairs_steps_per_second": 0.341, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_qasc_pairs_loss": 5.399855136871338, |
|
"eval_qasc_pairs_runtime": 0.9118, |
|
"eval_qasc_pairs_samples_per_second": 140.388, |
|
"eval_qasc_pairs_steps_per_second": 2.194, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_openbookqa_pairs_loss": 5.2847161293029785, |
|
"eval_openbookqa_pairs_runtime": 0.8603, |
|
"eval_openbookqa_pairs_samples_per_second": 148.787, |
|
"eval_openbookqa_pairs_steps_per_second": 2.325, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_msmarco_pairs_loss": 6.665152072906494, |
|
"eval_msmarco_pairs_runtime": 1.9686, |
|
"eval_msmarco_pairs_samples_per_second": 65.021, |
|
"eval_msmarco_pairs_steps_per_second": 1.016, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_nq_pairs_loss": 6.199686050415039, |
|
"eval_nq_pairs_runtime": 3.3173, |
|
"eval_nq_pairs_samples_per_second": 38.585, |
|
"eval_nq_pairs_steps_per_second": 0.603, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_trivia_pairs_loss": 5.7401909828186035, |
|
"eval_trivia_pairs_runtime": 4.4762, |
|
"eval_trivia_pairs_samples_per_second": 28.595, |
|
"eval_trivia_pairs_steps_per_second": 0.447, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_gooaq_pairs_loss": 5.792109489440918, |
|
"eval_gooaq_pairs_runtime": 1.4527, |
|
"eval_gooaq_pairs_samples_per_second": 88.113, |
|
"eval_gooaq_pairs_steps_per_second": 1.377, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_paws-pos_loss": 2.8563427925109863, |
|
"eval_paws-pos_runtime": 1.0232, |
|
"eval_paws-pos_samples_per_second": 125.099, |
|
"eval_paws-pos_steps_per_second": 1.955, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07757951900698215, |
|
"eval_global_dataset_loss": 4.84829568862915, |
|
"eval_global_dataset_runtime": 19.3977, |
|
"eval_global_dataset_samples_per_second": 21.446, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07835531419705198, |
|
"grad_norm": 7.275325298309326, |
|
"learning_rate": 4.071680376028202e-06, |
|
"loss": 5.3294, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.0791311093871218, |
|
"grad_norm": 6.854203224182129, |
|
"learning_rate": 4.1128084606345476e-06, |
|
"loss": 5.0933, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.07990690457719161, |
|
"grad_norm": 7.766842365264893, |
|
"learning_rate": 4.153936545240893e-06, |
|
"loss": 4.9333, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.08068269976726145, |
|
"grad_norm": 7.3934478759765625, |
|
"learning_rate": 4.195064629847238e-06, |
|
"loss": 5.1327, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.08145849495733126, |
|
"grad_norm": 4.127883434295654, |
|
"learning_rate": 4.236192714453583e-06, |
|
"loss": 5.5859, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.08223429014740109, |
|
"grad_norm": 8.2522554397583, |
|
"learning_rate": 4.277320799059929e-06, |
|
"loss": 4.9192, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.08301008533747091, |
|
"grad_norm": 16.312501907348633, |
|
"learning_rate": 4.318448883666274e-06, |
|
"loss": 3.8092, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.08378588052754073, |
|
"grad_norm": 9.926016807556152, |
|
"learning_rate": 4.35957696827262e-06, |
|
"loss": 6.0247, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.08456167571761056, |
|
"grad_norm": 7.373856544494629, |
|
"learning_rate": 4.400705052878966e-06, |
|
"loss": 5.6878, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.08533747090768037, |
|
"grad_norm": 7.337434768676758, |
|
"learning_rate": 4.441833137485311e-06, |
|
"loss": 5.0953, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.08611326609775019, |
|
"grad_norm": 12.865263938903809, |
|
"learning_rate": 4.482961222091657e-06, |
|
"loss": 6.3449, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.08688906128782002, |
|
"grad_norm": 6.9256591796875, |
|
"learning_rate": 4.524089306698001e-06, |
|
"loss": 4.9109, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.08766485647788984, |
|
"grad_norm": 4.341299057006836, |
|
"learning_rate": 4.5652173913043474e-06, |
|
"loss": 5.6, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.08844065166795965, |
|
"grad_norm": 8.670815467834473, |
|
"learning_rate": 4.606345475910693e-06, |
|
"loss": 4.8765, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.08921644685802949, |
|
"grad_norm": 10.17563247680664, |
|
"learning_rate": 4.647473560517038e-06, |
|
"loss": 5.7062, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.0899922420480993, |
|
"grad_norm": 7.781512260437012, |
|
"learning_rate": 4.688601645123384e-06, |
|
"loss": 4.7221, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.09076803723816912, |
|
"grad_norm": 8.361579895019531, |
|
"learning_rate": 4.7297297297297294e-06, |
|
"loss": 4.8871, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.09154383242823895, |
|
"grad_norm": 9.360175132751465, |
|
"learning_rate": 4.770857814336075e-06, |
|
"loss": 4.7828, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.09231962761830877, |
|
"grad_norm": 4.893143177032471, |
|
"learning_rate": 4.81198589894242e-06, |
|
"loss": 5.5502, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"grad_norm": 9.300333976745605, |
|
"learning_rate": 4.853113983548766e-06, |
|
"loss": 4.6165, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_Qnli-dev_cosine_accuracy": 0.6015625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9670753479003906, |
|
"eval_Qnli-dev_cosine_ap": 0.5722899415341274, |
|
"eval_Qnli-dev_cosine_f1": 0.6317204301075269, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.79914391040802, |
|
"eval_Qnli-dev_cosine_precision": 0.4625984251968504, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.548828125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 770.8974609375, |
|
"eval_Qnli-dev_dot_ap": 0.47908167649664435, |
|
"eval_Qnli-dev_dot_f1": 0.6317204301075269, |
|
"eval_Qnli-dev_dot_f1_threshold": 503.69384765625, |
|
"eval_Qnli-dev_dot_precision": 0.4625984251968504, |
|
"eval_Qnli-dev_dot_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.603515625, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 8.770221710205078, |
|
"eval_Qnli-dev_euclidean_ap": 0.5921268027422615, |
|
"eval_Qnli-dev_euclidean_f1": 0.6308724832214765, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 16.821897506713867, |
|
"eval_Qnli-dev_euclidean_precision": 0.46168958742632615, |
|
"eval_Qnli-dev_euclidean_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 144.48236083984375, |
|
"eval_Qnli-dev_manhattan_ap": 0.6088252763279582, |
|
"eval_Qnli-dev_manhattan_f1": 0.6329113924050632, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 209.13174438476562, |
|
"eval_Qnli-dev_manhattan_precision": 0.47368421052631576, |
|
"eval_Qnli-dev_manhattan_recall": 0.9533898305084746, |
|
"eval_Qnli-dev_max_accuracy": 0.625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 770.8974609375, |
|
"eval_Qnli-dev_max_ap": 0.6088252763279582, |
|
"eval_Qnli-dev_max_f1": 0.6329113924050632, |
|
"eval_Qnli-dev_max_f1_threshold": 503.69384765625, |
|
"eval_Qnli-dev_max_precision": 0.47368421052631576, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.6796875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9780304431915283, |
|
"eval_allNLI-dev_cosine_ap": 0.3992786539720722, |
|
"eval_allNLI-dev_cosine_f1": 0.5098634294385432, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8867166042327881, |
|
"eval_allNLI-dev_cosine_precision": 0.345679012345679, |
|
"eval_allNLI-dev_cosine_recall": 0.9710982658959537, |
|
"eval_allNLI-dev_dot_accuracy": 0.66015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 993.18896484375, |
|
"eval_allNLI-dev_dot_ap": 0.32359114658021904, |
|
"eval_allNLI-dev_dot_f1": 0.5080763582966226, |
|
"eval_allNLI-dev_dot_f1_threshold": 613.2178344726562, |
|
"eval_allNLI-dev_dot_precision": 0.3405511811023622, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.671875, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 6.144635200500488, |
|
"eval_allNLI-dev_euclidean_ap": 0.3919696045318126, |
|
"eval_allNLI-dev_euclidean_f1": 0.5120481927710844, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 14.77474594116211, |
|
"eval_allNLI-dev_euclidean_precision": 0.34623217922606925, |
|
"eval_allNLI-dev_euclidean_recall": 0.9826589595375722, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.66796875, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 91.93115234375, |
|
"eval_allNLI-dev_manhattan_ap": 0.404528316560946, |
|
"eval_allNLI-dev_manhattan_f1": 0.5082212257100149, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 230.32806396484375, |
|
"eval_allNLI-dev_manhattan_precision": 0.34274193548387094, |
|
"eval_allNLI-dev_manhattan_recall": 0.9826589595375722, |
|
"eval_allNLI-dev_max_accuracy": 0.6796875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 993.18896484375, |
|
"eval_allNLI-dev_max_ap": 0.404528316560946, |
|
"eval_allNLI-dev_max_f1": 0.5120481927710844, |
|
"eval_allNLI-dev_max_f1_threshold": 613.2178344726562, |
|
"eval_allNLI-dev_max_precision": 0.34623217922606925, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.6088252763279582, |
|
"eval_sts-test_pearson_cosine": 0.18225596814280462, |
|
"eval_sts-test_pearson_dot": 0.19389246184304787, |
|
"eval_sts-test_pearson_euclidean": 0.18398275691369742, |
|
"eval_sts-test_pearson_manhattan": 0.17222751044724327, |
|
"eval_sts-test_pearson_max": 0.19389246184304787, |
|
"eval_sts-test_spearman_cosine": 0.23913221385342298, |
|
"eval_sts-test_spearman_dot": 0.19319844513101708, |
|
"eval_sts-test_spearman_euclidean": 0.2106999921922726, |
|
"eval_sts-test_spearman_manhattan": 0.1993430170763632, |
|
"eval_sts-test_spearman_max": 0.23913221385342298, |
|
"eval_vitaminc-pairs_loss": 3.8911848068237305, |
|
"eval_vitaminc-pairs_runtime": 5.47, |
|
"eval_vitaminc-pairs_samples_per_second": 23.4, |
|
"eval_vitaminc-pairs_steps_per_second": 0.366, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_negation-triplets_loss": 5.301531791687012, |
|
"eval_negation-triplets_runtime": 1.0751, |
|
"eval_negation-triplets_samples_per_second": 119.055, |
|
"eval_negation-triplets_steps_per_second": 1.86, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_scitail-pairs-pos_loss": 2.535888910293579, |
|
"eval_scitail-pairs-pos_runtime": 1.2447, |
|
"eval_scitail-pairs-pos_samples_per_second": 102.834, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.607, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_scitail-pairs-qa_loss": 4.098081588745117, |
|
"eval_scitail-pairs-qa_runtime": 0.7828, |
|
"eval_scitail-pairs-qa_samples_per_second": 163.513, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.555, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_xsum-pairs_loss": 5.268539905548096, |
|
"eval_xsum-pairs_runtime": 3.9213, |
|
"eval_xsum-pairs_samples_per_second": 32.642, |
|
"eval_xsum-pairs_steps_per_second": 0.51, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_sciq_pairs_loss": 0.607820987701416, |
|
"eval_sciq_pairs_runtime": 5.8857, |
|
"eval_sciq_pairs_samples_per_second": 21.748, |
|
"eval_sciq_pairs_steps_per_second": 0.34, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_qasc_pairs_loss": 5.1934356689453125, |
|
"eval_qasc_pairs_runtime": 0.9116, |
|
"eval_qasc_pairs_samples_per_second": 140.413, |
|
"eval_qasc_pairs_steps_per_second": 2.194, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_openbookqa_pairs_loss": 5.243656158447266, |
|
"eval_openbookqa_pairs_runtime": 0.8604, |
|
"eval_openbookqa_pairs_samples_per_second": 148.766, |
|
"eval_openbookqa_pairs_steps_per_second": 2.324, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_msmarco_pairs_loss": 6.208409309387207, |
|
"eval_msmarco_pairs_runtime": 1.9688, |
|
"eval_msmarco_pairs_samples_per_second": 65.013, |
|
"eval_msmarco_pairs_steps_per_second": 1.016, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_nq_pairs_loss": 5.938248157501221, |
|
"eval_nq_pairs_runtime": 3.3259, |
|
"eval_nq_pairs_samples_per_second": 38.486, |
|
"eval_nq_pairs_steps_per_second": 0.601, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_trivia_pairs_loss": 5.63157844543457, |
|
"eval_trivia_pairs_runtime": 4.4797, |
|
"eval_trivia_pairs_samples_per_second": 28.574, |
|
"eval_trivia_pairs_steps_per_second": 0.446, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_gooaq_pairs_loss": 5.568259239196777, |
|
"eval_gooaq_pairs_runtime": 1.4576, |
|
"eval_gooaq_pairs_samples_per_second": 87.818, |
|
"eval_gooaq_pairs_steps_per_second": 1.372, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_paws-pos_loss": 2.3947365283966064, |
|
"eval_paws-pos_runtime": 1.0203, |
|
"eval_paws-pos_samples_per_second": 125.448, |
|
"eval_paws-pos_steps_per_second": 1.96, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09309542280837858, |
|
"eval_global_dataset_loss": 4.51961088180542, |
|
"eval_global_dataset_runtime": 19.3948, |
|
"eval_global_dataset_samples_per_second": 21.449, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.09387121799844841, |
|
"grad_norm": 8.517334938049316, |
|
"learning_rate": 4.894242068155111e-06, |
|
"loss": 4.892, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.09464701318851823, |
|
"grad_norm": 11.708131790161133, |
|
"learning_rate": 4.935370152761457e-06, |
|
"loss": 4.6732, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.09542280837858805, |
|
"grad_norm": 8.222532272338867, |
|
"learning_rate": 4.976498237367803e-06, |
|
"loss": 5.4697, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.09619860356865788, |
|
"grad_norm": 9.485191345214844, |
|
"learning_rate": 5.017626321974147e-06, |
|
"loss": 4.777, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.0969743987587277, |
|
"grad_norm": 19.364856719970703, |
|
"learning_rate": 5.0587544065804934e-06, |
|
"loss": 6.3552, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.09775019394879751, |
|
"grad_norm": 9.71495246887207, |
|
"learning_rate": 5.099882491186839e-06, |
|
"loss": 4.8758, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.09852598913886734, |
|
"grad_norm": 23.59428596496582, |
|
"learning_rate": 5.141010575793184e-06, |
|
"loss": 3.029, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.09930178432893716, |
|
"grad_norm": 15.757760047912598, |
|
"learning_rate": 5.182138660399529e-06, |
|
"loss": 5.7908, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.10007757951900698, |
|
"grad_norm": 20.3161678314209, |
|
"learning_rate": 5.2232667450058754e-06, |
|
"loss": 6.3179, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.1008533747090768, |
|
"grad_norm": 13.285233497619629, |
|
"learning_rate": 5.26439482961222e-06, |
|
"loss": 5.7515, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.10162916989914662, |
|
"grad_norm": 11.527393341064453, |
|
"learning_rate": 5.305522914218566e-06, |
|
"loss": 4.4534, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.10240496508921644, |
|
"grad_norm": 10.770364761352539, |
|
"learning_rate": 5.346650998824911e-06, |
|
"loss": 4.5598, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.10318076027928627, |
|
"grad_norm": 13.983633995056152, |
|
"learning_rate": 5.387779083431257e-06, |
|
"loss": 4.3048, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.10395655546935609, |
|
"grad_norm": 24.10936164855957, |
|
"learning_rate": 5.428907168037603e-06, |
|
"loss": 2.7566, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.10473235065942592, |
|
"grad_norm": 10.636360168457031, |
|
"learning_rate": 5.470035252643947e-06, |
|
"loss": 4.3254, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.10550814584949574, |
|
"grad_norm": 10.882925987243652, |
|
"learning_rate": 5.511163337250293e-06, |
|
"loss": 4.4835, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.10628394103956555, |
|
"grad_norm": 23.658288955688477, |
|
"learning_rate": 5.5522914218566394e-06, |
|
"loss": 4.4302, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.10705973622963538, |
|
"grad_norm": 11.732893943786621, |
|
"learning_rate": 5.593419506462984e-06, |
|
"loss": 4.2167, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1078355314197052, |
|
"grad_norm": 15.095183372497559, |
|
"learning_rate": 5.63454759106933e-06, |
|
"loss": 4.3158, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"grad_norm": 19.106853485107422, |
|
"learning_rate": 5.675675675675675e-06, |
|
"loss": 5.9198, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_Qnli-dev_cosine_accuracy": 0.6015625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.932540237903595, |
|
"eval_Qnli-dev_cosine_ap": 0.590162641547892, |
|
"eval_Qnli-dev_cosine_f1": 0.6358381502890174, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.8430850505828857, |
|
"eval_Qnli-dev_cosine_precision": 0.4824561403508772, |
|
"eval_Qnli-dev_cosine_recall": 0.9322033898305084, |
|
"eval_Qnli-dev_dot_accuracy": 0.568359375, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 651.802734375, |
|
"eval_Qnli-dev_dot_ap": 0.4990308476722506, |
|
"eval_Qnli-dev_dot_f1": 0.6318607764390897, |
|
"eval_Qnli-dev_dot_f1_threshold": 359.81072998046875, |
|
"eval_Qnli-dev_dot_precision": 0.461839530332681, |
|
"eval_Qnli-dev_dot_recall": 1.0, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.607421875, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.347358703613281, |
|
"eval_Qnli-dev_euclidean_ap": 0.5989545722402487, |
|
"eval_Qnli-dev_euclidean_f1": 0.6300268096514745, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.583602905273438, |
|
"eval_Qnli-dev_euclidean_precision": 0.46078431372549017, |
|
"eval_Qnli-dev_euclidean_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.62109375, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 161.458251953125, |
|
"eval_Qnli-dev_manhattan_ap": 0.6134087495053078, |
|
"eval_Qnli-dev_manhattan_f1": 0.6406685236768802, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 260.0387268066406, |
|
"eval_Qnli-dev_manhattan_precision": 0.47717842323651455, |
|
"eval_Qnli-dev_manhattan_recall": 0.9745762711864406, |
|
"eval_Qnli-dev_max_accuracy": 0.62109375, |
|
"eval_Qnli-dev_max_accuracy_threshold": 651.802734375, |
|
"eval_Qnli-dev_max_ap": 0.6134087495053078, |
|
"eval_Qnli-dev_max_f1": 0.6406685236768802, |
|
"eval_Qnli-dev_max_f1_threshold": 359.81072998046875, |
|
"eval_Qnli-dev_max_precision": 0.4824561403508772, |
|
"eval_Qnli-dev_max_recall": 1.0, |
|
"eval_allNLI-dev_cosine_accuracy": 0.68359375, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.971808135509491, |
|
"eval_allNLI-dev_cosine_ap": 0.41916615465917384, |
|
"eval_allNLI-dev_cosine_f1": 0.5088, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8694682121276855, |
|
"eval_allNLI-dev_cosine_precision": 0.35176991150442477, |
|
"eval_allNLI-dev_cosine_recall": 0.9190751445086706, |
|
"eval_allNLI-dev_dot_accuracy": 0.6640625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 875.8065185546875, |
|
"eval_allNLI-dev_dot_ap": 0.33345963244863186, |
|
"eval_allNLI-dev_dot_f1": 0.5081723625557207, |
|
"eval_allNLI-dev_dot_f1_threshold": 514.4766845703125, |
|
"eval_allNLI-dev_dot_precision": 0.342, |
|
"eval_allNLI-dev_dot_recall": 0.9884393063583815, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.673828125, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 6.979422092437744, |
|
"eval_allNLI-dev_euclidean_ap": 0.41272090015066604, |
|
"eval_allNLI-dev_euclidean_f1": 0.5102362204724409, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 14.31092643737793, |
|
"eval_allNLI-dev_euclidean_precision": 0.35064935064935066, |
|
"eval_allNLI-dev_euclidean_recall": 0.9364161849710982, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.673828125, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 113.5325698852539, |
|
"eval_allNLI-dev_manhattan_ap": 0.4170134796589544, |
|
"eval_allNLI-dev_manhattan_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 311.357177734375, |
|
"eval_allNLI-dev_manhattan_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_manhattan_recall": 1.0, |
|
"eval_allNLI-dev_max_accuracy": 0.68359375, |
|
"eval_allNLI-dev_max_accuracy_threshold": 875.8065185546875, |
|
"eval_allNLI-dev_max_ap": 0.41916615465917384, |
|
"eval_allNLI-dev_max_f1": 0.5102362204724409, |
|
"eval_allNLI-dev_max_f1_threshold": 514.4766845703125, |
|
"eval_allNLI-dev_max_precision": 0.35176991150442477, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.6134087495053078, |
|
"eval_sts-test_pearson_cosine": 0.19606202486057506, |
|
"eval_sts-test_pearson_dot": 0.20910750938672273, |
|
"eval_sts-test_pearson_euclidean": 0.20204170742350583, |
|
"eval_sts-test_pearson_manhattan": 0.19980066844086258, |
|
"eval_sts-test_pearson_max": 0.20910750938672273, |
|
"eval_sts-test_spearman_cosine": 0.2570204565643615, |
|
"eval_sts-test_spearman_dot": 0.2142791085758339, |
|
"eval_sts-test_spearman_euclidean": 0.22658230436260302, |
|
"eval_sts-test_spearman_manhattan": 0.2257667126237448, |
|
"eval_sts-test_spearman_max": 0.2570204565643615, |
|
"eval_vitaminc-pairs_loss": 3.860930919647217, |
|
"eval_vitaminc-pairs_runtime": 5.4616, |
|
"eval_vitaminc-pairs_samples_per_second": 23.437, |
|
"eval_vitaminc-pairs_steps_per_second": 0.366, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_negation-triplets_loss": 5.22545051574707, |
|
"eval_negation-triplets_runtime": 1.061, |
|
"eval_negation-triplets_samples_per_second": 120.643, |
|
"eval_negation-triplets_steps_per_second": 1.885, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_scitail-pairs-pos_loss": 2.2156143188476562, |
|
"eval_scitail-pairs-pos_runtime": 1.2311, |
|
"eval_scitail-pairs-pos_samples_per_second": 103.974, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.625, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_scitail-pairs-qa_loss": 3.2973973751068115, |
|
"eval_scitail-pairs-qa_runtime": 0.7709, |
|
"eval_scitail-pairs-qa_samples_per_second": 166.038, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.594, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_xsum-pairs_loss": 5.160762310028076, |
|
"eval_xsum-pairs_runtime": 3.9073, |
|
"eval_xsum-pairs_samples_per_second": 32.759, |
|
"eval_xsum-pairs_steps_per_second": 0.512, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_sciq_pairs_loss": 0.6155482530593872, |
|
"eval_sciq_pairs_runtime": 5.8124, |
|
"eval_sciq_pairs_samples_per_second": 22.022, |
|
"eval_sciq_pairs_steps_per_second": 0.344, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_qasc_pairs_loss": 5.0954060554504395, |
|
"eval_qasc_pairs_runtime": 0.9074, |
|
"eval_qasc_pairs_samples_per_second": 141.069, |
|
"eval_qasc_pairs_steps_per_second": 2.204, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_openbookqa_pairs_loss": 5.480427265167236, |
|
"eval_openbookqa_pairs_runtime": 0.8517, |
|
"eval_openbookqa_pairs_samples_per_second": 150.279, |
|
"eval_openbookqa_pairs_steps_per_second": 2.348, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_msmarco_pairs_loss": 6.218542575836182, |
|
"eval_msmarco_pairs_runtime": 1.9635, |
|
"eval_msmarco_pairs_samples_per_second": 65.189, |
|
"eval_msmarco_pairs_steps_per_second": 1.019, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_nq_pairs_loss": 5.9674296379089355, |
|
"eval_nq_pairs_runtime": 3.3526, |
|
"eval_nq_pairs_samples_per_second": 38.179, |
|
"eval_nq_pairs_steps_per_second": 0.597, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_trivia_pairs_loss": 6.06894063949585, |
|
"eval_trivia_pairs_runtime": 4.4755, |
|
"eval_trivia_pairs_samples_per_second": 28.6, |
|
"eval_trivia_pairs_steps_per_second": 0.447, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_gooaq_pairs_loss": 5.648138523101807, |
|
"eval_gooaq_pairs_runtime": 1.4628, |
|
"eval_gooaq_pairs_samples_per_second": 87.504, |
|
"eval_gooaq_pairs_steps_per_second": 1.367, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_paws-pos_loss": 1.3320106267929077, |
|
"eval_paws-pos_runtime": 1.0268, |
|
"eval_paws-pos_samples_per_second": 124.656, |
|
"eval_paws-pos_steps_per_second": 1.948, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10861132660977502, |
|
"eval_global_dataset_loss": 4.069947242736816, |
|
"eval_global_dataset_runtime": 19.4022, |
|
"eval_global_dataset_samples_per_second": 21.441, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.10938712179984485, |
|
"grad_norm": 17.907608032226562, |
|
"learning_rate": 5.716803760282021e-06, |
|
"loss": 5.8619, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.11016291698991466, |
|
"grad_norm": 12.681406021118164, |
|
"learning_rate": 5.757931844888366e-06, |
|
"loss": 4.42, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.11093871217998448, |
|
"grad_norm": 29.748260498046875, |
|
"learning_rate": 5.799059929494712e-06, |
|
"loss": 6.5458, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.11171450737005431, |
|
"grad_norm": 17.36277198791504, |
|
"learning_rate": 5.8401880141010565e-06, |
|
"loss": 3.7463, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.11249030256012413, |
|
"grad_norm": 15.745108604431152, |
|
"learning_rate": 5.881316098707403e-06, |
|
"loss": 4.8236, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.11326609775019394, |
|
"grad_norm": 17.49993896484375, |
|
"learning_rate": 5.922444183313749e-06, |
|
"loss": 5.9082, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.11404189294026378, |
|
"grad_norm": 11.602448463439941, |
|
"learning_rate": 5.963572267920093e-06, |
|
"loss": 3.9276, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.11481768813033359, |
|
"grad_norm": 14.728039741516113, |
|
"learning_rate": 6.004700352526439e-06, |
|
"loss": 4.3073, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.11559348332040341, |
|
"grad_norm": 12.118491172790527, |
|
"learning_rate": 6.045828437132785e-06, |
|
"loss": 4.06, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.11636927851047324, |
|
"grad_norm": 14.151768684387207, |
|
"learning_rate": 6.08695652173913e-06, |
|
"loss": 4.7036, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11714507370054306, |
|
"grad_norm": 12.606175422668457, |
|
"learning_rate": 6.128084606345475e-06, |
|
"loss": 3.9509, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.11792086889061287, |
|
"grad_norm": 38.992183685302734, |
|
"learning_rate": 6.1692126909518205e-06, |
|
"loss": 6.3894, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.1186966640806827, |
|
"grad_norm": 13.61286449432373, |
|
"learning_rate": 6.210340775558167e-06, |
|
"loss": 4.4063, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.11947245927075252, |
|
"grad_norm": 17.750974655151367, |
|
"learning_rate": 6.251468860164512e-06, |
|
"loss": 3.6723, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.12024825446082234, |
|
"grad_norm": 17.65484046936035, |
|
"learning_rate": 6.292596944770857e-06, |
|
"loss": 5.462, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.12102404965089217, |
|
"grad_norm": 12.64035415649414, |
|
"learning_rate": 6.3337250293772025e-06, |
|
"loss": 4.579, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.12179984484096198, |
|
"grad_norm": 11.979784965515137, |
|
"learning_rate": 6.374853113983549e-06, |
|
"loss": 3.9228, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.1225756400310318, |
|
"grad_norm": 12.76739501953125, |
|
"learning_rate": 6.415981198589893e-06, |
|
"loss": 3.6178, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.12335143522110163, |
|
"grad_norm": 27.8046817779541, |
|
"learning_rate": 6.457109283196239e-06, |
|
"loss": 5.686, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"grad_norm": 12.14214038848877, |
|
"learning_rate": 6.498237367802585e-06, |
|
"loss": 3.5491, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_Qnli-dev_cosine_accuracy": 0.611328125, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9235547780990601, |
|
"eval_Qnli-dev_cosine_ap": 0.5831965528167927, |
|
"eval_Qnli-dev_cosine_f1": 0.6318758815232722, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.8174055218696594, |
|
"eval_Qnli-dev_cosine_precision": 0.47357293868921774, |
|
"eval_Qnli-dev_cosine_recall": 0.9491525423728814, |
|
"eval_Qnli-dev_dot_accuracy": 0.564453125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 650.8389892578125, |
|
"eval_Qnli-dev_dot_ap": 0.5067183783295084, |
|
"eval_Qnli-dev_dot_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_dot_f1_threshold": 399.63836669921875, |
|
"eval_Qnli-dev_dot_precision": 0.4627450980392157, |
|
"eval_Qnli-dev_dot_recall": 1.0, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.6171875, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.054267883300781, |
|
"eval_Qnli-dev_euclidean_ap": 0.5948450213638221, |
|
"eval_Qnli-dev_euclidean_f1": 0.6318607764390897, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 20.041950225830078, |
|
"eval_Qnli-dev_euclidean_precision": 0.461839530332681, |
|
"eval_Qnli-dev_euclidean_recall": 1.0, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 173.52857971191406, |
|
"eval_Qnli-dev_manhattan_ap": 0.6122239571226092, |
|
"eval_Qnli-dev_manhattan_f1": 0.6353591160220995, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 274.06744384765625, |
|
"eval_Qnli-dev_manhattan_precision": 0.4713114754098361, |
|
"eval_Qnli-dev_manhattan_recall": 0.9745762711864406, |
|
"eval_Qnli-dev_max_accuracy": 0.625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 650.8389892578125, |
|
"eval_Qnli-dev_max_ap": 0.6122239571226092, |
|
"eval_Qnli-dev_max_f1": 0.6353591160220995, |
|
"eval_Qnli-dev_max_f1_threshold": 399.63836669921875, |
|
"eval_Qnli-dev_max_precision": 0.47357293868921774, |
|
"eval_Qnli-dev_max_recall": 1.0, |
|
"eval_allNLI-dev_cosine_accuracy": 0.6796875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9759693145751953, |
|
"eval_allNLI-dev_cosine_ap": 0.42359590129311003, |
|
"eval_allNLI-dev_cosine_f1": 0.5105740181268882, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8282982110977173, |
|
"eval_allNLI-dev_cosine_precision": 0.3456032719836401, |
|
"eval_allNLI-dev_cosine_recall": 0.976878612716763, |
|
"eval_allNLI-dev_dot_accuracy": 0.66796875, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 799.5001831054688, |
|
"eval_allNLI-dev_dot_ap": 0.3510979636740506, |
|
"eval_allNLI-dev_dot_f1": 0.5089820359281437, |
|
"eval_allNLI-dev_dot_f1_threshold": 532.1497802734375, |
|
"eval_allNLI-dev_dot_precision": 0.3434343434343434, |
|
"eval_allNLI-dev_dot_recall": 0.9826589595375722, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.673828125, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 6.110721588134766, |
|
"eval_allNLI-dev_euclidean_ap": 0.4178547437007212, |
|
"eval_allNLI-dev_euclidean_f1": 0.519134775374376, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 13.405746459960938, |
|
"eval_allNLI-dev_euclidean_precision": 0.3644859813084112, |
|
"eval_allNLI-dev_euclidean_recall": 0.9017341040462428, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.67578125, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 103.17061614990234, |
|
"eval_allNLI-dev_manhattan_ap": 0.4272300717524159, |
|
"eval_allNLI-dev_manhattan_f1": 0.5107033639143731, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 245.95425415039062, |
|
"eval_allNLI-dev_manhattan_precision": 0.3471933471933472, |
|
"eval_allNLI-dev_manhattan_recall": 0.9653179190751445, |
|
"eval_allNLI-dev_max_accuracy": 0.6796875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 799.5001831054688, |
|
"eval_allNLI-dev_max_ap": 0.4272300717524159, |
|
"eval_allNLI-dev_max_f1": 0.519134775374376, |
|
"eval_allNLI-dev_max_f1_threshold": 532.1497802734375, |
|
"eval_allNLI-dev_max_precision": 0.3644859813084112, |
|
"eval_allNLI-dev_max_recall": 0.9826589595375722, |
|
"eval_sequential_score": 0.6122239571226092, |
|
"eval_sts-test_pearson_cosine": 0.1983814843936258, |
|
"eval_sts-test_pearson_dot": 0.21567697770934743, |
|
"eval_sts-test_pearson_euclidean": 0.2076928261154226, |
|
"eval_sts-test_pearson_manhattan": 0.2134685079189917, |
|
"eval_sts-test_pearson_max": 0.21567697770934743, |
|
"eval_sts-test_spearman_cosine": 0.2667653122399071, |
|
"eval_sts-test_spearman_dot": 0.21757047434864857, |
|
"eval_sts-test_spearman_euclidean": 0.2401902679264998, |
|
"eval_sts-test_spearman_manhattan": 0.24235085882647658, |
|
"eval_sts-test_spearman_max": 0.2667653122399071, |
|
"eval_vitaminc-pairs_loss": 3.85381817817688, |
|
"eval_vitaminc-pairs_runtime": 5.4462, |
|
"eval_vitaminc-pairs_samples_per_second": 23.503, |
|
"eval_vitaminc-pairs_steps_per_second": 0.367, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_negation-triplets_loss": 5.059137344360352, |
|
"eval_negation-triplets_runtime": 1.0529, |
|
"eval_negation-triplets_samples_per_second": 121.569, |
|
"eval_negation-triplets_steps_per_second": 1.9, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_scitail-pairs-pos_loss": 1.9513375759124756, |
|
"eval_scitail-pairs-pos_runtime": 1.2192, |
|
"eval_scitail-pairs-pos_samples_per_second": 104.99, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.64, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_scitail-pairs-qa_loss": 2.7738723754882812, |
|
"eval_scitail-pairs-qa_runtime": 0.7658, |
|
"eval_scitail-pairs-qa_samples_per_second": 167.149, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.612, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_xsum-pairs_loss": 4.9543375968933105, |
|
"eval_xsum-pairs_runtime": 3.9138, |
|
"eval_xsum-pairs_samples_per_second": 32.705, |
|
"eval_xsum-pairs_steps_per_second": 0.511, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_sciq_pairs_loss": 0.5856431722640991, |
|
"eval_sciq_pairs_runtime": 5.846, |
|
"eval_sciq_pairs_samples_per_second": 21.895, |
|
"eval_sciq_pairs_steps_per_second": 0.342, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_qasc_pairs_loss": 4.644654750823975, |
|
"eval_qasc_pairs_runtime": 0.9037, |
|
"eval_qasc_pairs_samples_per_second": 141.642, |
|
"eval_qasc_pairs_steps_per_second": 2.213, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_openbookqa_pairs_loss": 5.493581771850586, |
|
"eval_openbookqa_pairs_runtime": 0.8617, |
|
"eval_openbookqa_pairs_samples_per_second": 148.55, |
|
"eval_openbookqa_pairs_steps_per_second": 2.321, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_msmarco_pairs_loss": 5.456345081329346, |
|
"eval_msmarco_pairs_runtime": 1.9747, |
|
"eval_msmarco_pairs_samples_per_second": 64.82, |
|
"eval_msmarco_pairs_steps_per_second": 1.013, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_nq_pairs_loss": 5.471042633056641, |
|
"eval_nq_pairs_runtime": 3.332, |
|
"eval_nq_pairs_samples_per_second": 38.416, |
|
"eval_nq_pairs_steps_per_second": 0.6, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_trivia_pairs_loss": 5.90379524230957, |
|
"eval_trivia_pairs_runtime": 4.4746, |
|
"eval_trivia_pairs_samples_per_second": 28.606, |
|
"eval_trivia_pairs_steps_per_second": 0.447, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_gooaq_pairs_loss": 5.094766139984131, |
|
"eval_gooaq_pairs_runtime": 1.4626, |
|
"eval_gooaq_pairs_samples_per_second": 87.517, |
|
"eval_gooaq_pairs_steps_per_second": 1.367, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_paws-pos_loss": 1.0914796590805054, |
|
"eval_paws-pos_runtime": 1.0277, |
|
"eval_paws-pos_samples_per_second": 124.555, |
|
"eval_paws-pos_steps_per_second": 1.946, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12412723041117145, |
|
"eval_global_dataset_loss": 3.7029871940612793, |
|
"eval_global_dataset_runtime": 19.3902, |
|
"eval_global_dataset_samples_per_second": 21.454, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.12490302560124127, |
|
"grad_norm": 12.19548511505127, |
|
"learning_rate": 6.53936545240893e-06, |
|
"loss": 4.0703, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.1256788207913111, |
|
"grad_norm": 11.81810188293457, |
|
"learning_rate": 6.580493537015276e-06, |
|
"loss": 3.7131, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.1264546159813809, |
|
"grad_norm": 12.847527503967285, |
|
"learning_rate": 6.621621621621621e-06, |
|
"loss": 3.8675, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.12723041117145073, |
|
"grad_norm": 12.332374572753906, |
|
"learning_rate": 6.6627497062279665e-06, |
|
"loss": 3.6557, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.12800620636152055, |
|
"grad_norm": 15.913554191589355, |
|
"learning_rate": 6.703877790834312e-06, |
|
"loss": 5.5114, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.1287820015515904, |
|
"grad_norm": 12.6423978805542, |
|
"learning_rate": 6.745005875440658e-06, |
|
"loss": 3.5924, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.1295577967416602, |
|
"grad_norm": 11.64156723022461, |
|
"learning_rate": 6.786133960047002e-06, |
|
"loss": 3.7331, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.13033359193173003, |
|
"grad_norm": 14.570131301879883, |
|
"learning_rate": 6.8272620446533485e-06, |
|
"loss": 5.2668, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.13110938712179984, |
|
"grad_norm": 13.375134468078613, |
|
"learning_rate": 6.868390129259695e-06, |
|
"loss": 3.5033, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.13188518231186966, |
|
"grad_norm": 13.718034744262695, |
|
"learning_rate": 6.909518213866039e-06, |
|
"loss": 3.9921, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.13266097750193948, |
|
"grad_norm": 12.68748950958252, |
|
"learning_rate": 6.950646298472385e-06, |
|
"loss": 3.3935, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.13343677269200932, |
|
"grad_norm": 18.421518325805664, |
|
"learning_rate": 6.99177438307873e-06, |
|
"loss": 4.9198, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.13421256788207914, |
|
"grad_norm": 21.028749465942383, |
|
"learning_rate": 7.032902467685076e-06, |
|
"loss": 1.3145, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.13498836307214895, |
|
"grad_norm": 21.025897979736328, |
|
"learning_rate": 7.074030552291421e-06, |
|
"loss": 5.0971, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.13576415826221877, |
|
"grad_norm": 16.524944305419922, |
|
"learning_rate": 7.115158636897766e-06, |
|
"loss": 4.7722, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.1365399534522886, |
|
"grad_norm": 15.925724029541016, |
|
"learning_rate": 7.1562867215041125e-06, |
|
"loss": 4.6407, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.1373157486423584, |
|
"grad_norm": 19.057390213012695, |
|
"learning_rate": 7.197414806110458e-06, |
|
"loss": 5.1164, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.13809154383242825, |
|
"grad_norm": 15.260638236999512, |
|
"learning_rate": 7.238542890716803e-06, |
|
"loss": 3.751, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.13886733902249807, |
|
"grad_norm": 30.054716110229492, |
|
"learning_rate": 7.279670975323148e-06, |
|
"loss": 5.4906, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"grad_norm": 25.419301986694336, |
|
"learning_rate": 7.3207990599294945e-06, |
|
"loss": 5.1472, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_Qnli-dev_cosine_accuracy": 0.615234375, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9118244647979736, |
|
"eval_Qnli-dev_cosine_ap": 0.5757630053560099, |
|
"eval_Qnli-dev_cosine_f1": 0.6347469220246238, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.708939254283905, |
|
"eval_Qnli-dev_cosine_precision": 0.4686868686868687, |
|
"eval_Qnli-dev_cosine_recall": 0.9830508474576272, |
|
"eval_Qnli-dev_dot_accuracy": 0.57421875, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 521.3780517578125, |
|
"eval_Qnli-dev_dot_ap": 0.5092251488391427, |
|
"eval_Qnli-dev_dot_f1": 0.6317204301075269, |
|
"eval_Qnli-dev_dot_f1_threshold": 351.0001220703125, |
|
"eval_Qnli-dev_dot_precision": 0.4625984251968504, |
|
"eval_Qnli-dev_dot_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.609375, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.459676742553711, |
|
"eval_Qnli-dev_euclidean_ap": 0.5817988420936584, |
|
"eval_Qnli-dev_euclidean_f1": 0.6318607764390897, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 21.422916412353516, |
|
"eval_Qnli-dev_euclidean_precision": 0.461839530332681, |
|
"eval_Qnli-dev_euclidean_recall": 1.0, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.619140625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 212.72662353515625, |
|
"eval_Qnli-dev_manhattan_ap": 0.5901977757397192, |
|
"eval_Qnli-dev_manhattan_f1": 0.6324324324324324, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 349.28887939453125, |
|
"eval_Qnli-dev_manhattan_precision": 0.4642857142857143, |
|
"eval_Qnli-dev_manhattan_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_max_accuracy": 0.619140625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 521.3780517578125, |
|
"eval_Qnli-dev_max_ap": 0.5901977757397192, |
|
"eval_Qnli-dev_max_f1": 0.6347469220246238, |
|
"eval_Qnli-dev_max_f1_threshold": 351.0001220703125, |
|
"eval_Qnli-dev_max_precision": 0.4686868686868687, |
|
"eval_Qnli-dev_max_recall": 1.0, |
|
"eval_allNLI-dev_cosine_accuracy": 0.681640625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9692702293395996, |
|
"eval_allNLI-dev_cosine_ap": 0.43588706986821285, |
|
"eval_allNLI-dev_cosine_f1": 0.5211505922165821, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8656014800071716, |
|
"eval_allNLI-dev_cosine_precision": 0.3684210526315789, |
|
"eval_allNLI-dev_cosine_recall": 0.8901734104046243, |
|
"eval_allNLI-dev_dot_accuracy": 0.66796875, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 785.7974243164062, |
|
"eval_allNLI-dev_dot_ap": 0.36610448753393265, |
|
"eval_allNLI-dev_dot_f1": 0.5118343195266272, |
|
"eval_allNLI-dev_dot_f1_threshold": 447.95782470703125, |
|
"eval_allNLI-dev_dot_precision": 0.34393638170974156, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.677734375, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 6.728440284729004, |
|
"eval_allNLI-dev_euclidean_ap": 0.4317993856992305, |
|
"eval_allNLI-dev_euclidean_f1": 0.5214626391096979, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 15.216902732849121, |
|
"eval_allNLI-dev_euclidean_precision": 0.35964912280701755, |
|
"eval_allNLI-dev_euclidean_recall": 0.9479768786127167, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.673828125, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 101.38760375976562, |
|
"eval_allNLI-dev_manhattan_ap": 0.43314824920268913, |
|
"eval_allNLI-dev_manhattan_f1": 0.5157421289355323, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 286.70037841796875, |
|
"eval_allNLI-dev_manhattan_precision": 0.3481781376518219, |
|
"eval_allNLI-dev_manhattan_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_max_accuracy": 0.681640625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 785.7974243164062, |
|
"eval_allNLI-dev_max_ap": 0.43588706986821285, |
|
"eval_allNLI-dev_max_f1": 0.5214626391096979, |
|
"eval_allNLI-dev_max_f1_threshold": 447.95782470703125, |
|
"eval_allNLI-dev_max_precision": 0.3684210526315789, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.5901977757397192, |
|
"eval_sts-test_pearson_cosine": 0.20493152812453436, |
|
"eval_sts-test_pearson_dot": 0.21775282373018834, |
|
"eval_sts-test_pearson_euclidean": 0.2301348094949043, |
|
"eval_sts-test_pearson_manhattan": 0.23840753470415393, |
|
"eval_sts-test_pearson_max": 0.23840753470415393, |
|
"eval_sts-test_spearman_cosine": 0.28828077894040277, |
|
"eval_sts-test_spearman_dot": 0.23061179744080743, |
|
"eval_sts-test_spearman_euclidean": 0.26600149771047865, |
|
"eval_sts-test_spearman_manhattan": 0.26965490118794977, |
|
"eval_sts-test_spearman_max": 0.28828077894040277, |
|
"eval_vitaminc-pairs_loss": 3.7227895259857178, |
|
"eval_vitaminc-pairs_runtime": 5.4557, |
|
"eval_vitaminc-pairs_samples_per_second": 23.462, |
|
"eval_vitaminc-pairs_steps_per_second": 0.367, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_negation-triplets_loss": 4.880922794342041, |
|
"eval_negation-triplets_runtime": 1.0642, |
|
"eval_negation-triplets_samples_per_second": 120.279, |
|
"eval_negation-triplets_steps_per_second": 1.879, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_scitail-pairs-pos_loss": 1.6082611083984375, |
|
"eval_scitail-pairs-pos_runtime": 1.2328, |
|
"eval_scitail-pairs-pos_samples_per_second": 103.83, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.622, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_scitail-pairs-qa_loss": 2.3117823600769043, |
|
"eval_scitail-pairs-qa_runtime": 0.763, |
|
"eval_scitail-pairs-qa_samples_per_second": 167.752, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.621, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_xsum-pairs_loss": 4.128580093383789, |
|
"eval_xsum-pairs_runtime": 3.9087, |
|
"eval_xsum-pairs_samples_per_second": 32.748, |
|
"eval_xsum-pairs_steps_per_second": 0.512, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_sciq_pairs_loss": 0.4913565516471863, |
|
"eval_sciq_pairs_runtime": 5.8016, |
|
"eval_sciq_pairs_samples_per_second": 22.063, |
|
"eval_sciq_pairs_steps_per_second": 0.345, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_qasc_pairs_loss": 4.587019920349121, |
|
"eval_qasc_pairs_runtime": 0.8933, |
|
"eval_qasc_pairs_samples_per_second": 143.294, |
|
"eval_qasc_pairs_steps_per_second": 2.239, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_openbookqa_pairs_loss": 5.624441146850586, |
|
"eval_openbookqa_pairs_runtime": 0.852, |
|
"eval_openbookqa_pairs_samples_per_second": 150.235, |
|
"eval_openbookqa_pairs_steps_per_second": 2.347, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_msmarco_pairs_loss": 5.057087421417236, |
|
"eval_msmarco_pairs_runtime": 1.965, |
|
"eval_msmarco_pairs_samples_per_second": 65.14, |
|
"eval_msmarco_pairs_steps_per_second": 1.018, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_nq_pairs_loss": 5.1600823402404785, |
|
"eval_nq_pairs_runtime": 3.3352, |
|
"eval_nq_pairs_samples_per_second": 38.378, |
|
"eval_nq_pairs_steps_per_second": 0.6, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_trivia_pairs_loss": 5.498032093048096, |
|
"eval_trivia_pairs_runtime": 4.4801, |
|
"eval_trivia_pairs_samples_per_second": 28.57, |
|
"eval_trivia_pairs_steps_per_second": 0.446, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_gooaq_pairs_loss": 4.9159464836120605, |
|
"eval_gooaq_pairs_runtime": 1.4649, |
|
"eval_gooaq_pairs_samples_per_second": 87.38, |
|
"eval_gooaq_pairs_steps_per_second": 1.365, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_paws-pos_loss": 0.44706642627716064, |
|
"eval_paws-pos_runtime": 1.0313, |
|
"eval_paws-pos_samples_per_second": 124.114, |
|
"eval_paws-pos_steps_per_second": 1.939, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13964313421256788, |
|
"eval_global_dataset_loss": 3.263042688369751, |
|
"eval_global_dataset_runtime": 19.4013, |
|
"eval_global_dataset_samples_per_second": 21.442, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1404189294026377, |
|
"grad_norm": 17.023818969726562, |
|
"learning_rate": 7.361927144535839e-06, |
|
"loss": 4.17, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.14119472459270752, |
|
"grad_norm": 22.472793579101562, |
|
"learning_rate": 7.403055229142185e-06, |
|
"loss": 4.8977, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.14197051978277736, |
|
"grad_norm": 14.055580139160156, |
|
"learning_rate": 7.444183313748531e-06, |
|
"loss": 3.5031, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.14274631497284718, |
|
"grad_norm": 14.841517448425293, |
|
"learning_rate": 7.485311398354876e-06, |
|
"loss": 3.4286, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.143522110162917, |
|
"grad_norm": 21.218738555908203, |
|
"learning_rate": 7.526439482961222e-06, |
|
"loss": 3.2536, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1442979053529868, |
|
"grad_norm": 14.635804176330566, |
|
"learning_rate": 7.567567567567567e-06, |
|
"loss": 4.1632, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.14507370054305663, |
|
"grad_norm": 16.717111587524414, |
|
"learning_rate": 7.608695652173912e-06, |
|
"loss": 3.6512, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.14584949573312644, |
|
"grad_norm": 20.993993759155273, |
|
"learning_rate": 7.649823736780258e-06, |
|
"loss": 3.3453, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.1466252909231963, |
|
"grad_norm": 21.484180450439453, |
|
"learning_rate": 7.690951821386603e-06, |
|
"loss": 4.9785, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.1474010861132661, |
|
"grad_norm": 14.600364685058594, |
|
"learning_rate": 7.732079905992948e-06, |
|
"loss": 3.1781, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.14817688130333592, |
|
"grad_norm": 17.323484420776367, |
|
"learning_rate": 7.773207990599295e-06, |
|
"loss": 3.6681, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.14895267649340574, |
|
"grad_norm": 17.703405380249023, |
|
"learning_rate": 7.814336075205639e-06, |
|
"loss": 4.5109, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.14972847168347556, |
|
"grad_norm": 15.650117874145508, |
|
"learning_rate": 7.855464159811986e-06, |
|
"loss": 4.4412, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.15050426687354537, |
|
"grad_norm": 30.876625061035156, |
|
"learning_rate": 7.896592244418331e-06, |
|
"loss": 5.1287, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.15128006206361522, |
|
"grad_norm": 18.507810592651367, |
|
"learning_rate": 7.937720329024676e-06, |
|
"loss": 4.2127, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.15205585725368503, |
|
"grad_norm": 14.450047492980957, |
|
"learning_rate": 7.978848413631022e-06, |
|
"loss": 3.1766, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.15283165244375485, |
|
"grad_norm": 20.77067756652832, |
|
"learning_rate": 8.019976498237367e-06, |
|
"loss": 2.5901, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.15360744763382467, |
|
"grad_norm": 12.4572114944458, |
|
"learning_rate": 8.061104582843712e-06, |
|
"loss": 3.5886, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.15438324282389448, |
|
"grad_norm": 15.76103687286377, |
|
"learning_rate": 8.102232667450058e-06, |
|
"loss": 2.9085, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"grad_norm": 14.595601081848145, |
|
"learning_rate": 8.143360752056405e-06, |
|
"loss": 2.4958, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_Qnli-dev_cosine_accuracy": 0.615234375, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.8780630826950073, |
|
"eval_Qnli-dev_cosine_ap": 0.6112556879875849, |
|
"eval_Qnli-dev_cosine_f1": 0.6363636363636364, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7175576090812683, |
|
"eval_Qnli-dev_cosine_precision": 0.4714285714285714, |
|
"eval_Qnli-dev_cosine_recall": 0.9788135593220338, |
|
"eval_Qnli-dev_dot_accuracy": 0.580078125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 530.33447265625, |
|
"eval_Qnli-dev_dot_ap": 0.5372984783278565, |
|
"eval_Qnli-dev_dot_f1": 0.6305555555555555, |
|
"eval_Qnli-dev_dot_f1_threshold": 403.0582275390625, |
|
"eval_Qnli-dev_dot_precision": 0.4690082644628099, |
|
"eval_Qnli-dev_dot_recall": 0.961864406779661, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.619140625, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 12.641845703125, |
|
"eval_Qnli-dev_euclidean_ap": 0.6133386902087727, |
|
"eval_Qnli-dev_euclidean_f1": 0.6331521739130436, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.538082122802734, |
|
"eval_Qnli-dev_euclidean_precision": 0.466, |
|
"eval_Qnli-dev_euclidean_recall": 0.9872881355932204, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.619140625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 190.95291137695312, |
|
"eval_Qnli-dev_manhattan_ap": 0.6200633454752746, |
|
"eval_Qnli-dev_manhattan_f1": 0.6364883401920438, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 324.4531555175781, |
|
"eval_Qnli-dev_manhattan_precision": 0.47058823529411764, |
|
"eval_Qnli-dev_manhattan_recall": 0.9830508474576272, |
|
"eval_Qnli-dev_max_accuracy": 0.619140625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 530.33447265625, |
|
"eval_Qnli-dev_max_ap": 0.6200633454752746, |
|
"eval_Qnli-dev_max_f1": 0.6364883401920438, |
|
"eval_Qnli-dev_max_f1_threshold": 403.0582275390625, |
|
"eval_Qnli-dev_max_precision": 0.4714285714285714, |
|
"eval_Qnli-dev_max_recall": 0.9872881355932204, |
|
"eval_allNLI-dev_cosine_accuracy": 0.6875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.976324200630188, |
|
"eval_allNLI-dev_cosine_ap": 0.45960393014401535, |
|
"eval_allNLI-dev_cosine_f1": 0.5253164556962024, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8385776877403259, |
|
"eval_allNLI-dev_cosine_precision": 0.3616557734204793, |
|
"eval_allNLI-dev_cosine_recall": 0.9595375722543352, |
|
"eval_allNLI-dev_dot_accuracy": 0.677734375, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 772.0188598632812, |
|
"eval_allNLI-dev_dot_ap": 0.3804150461800332, |
|
"eval_allNLI-dev_dot_f1": 0.5080763582966226, |
|
"eval_allNLI-dev_dot_f1_threshold": 444.7850646972656, |
|
"eval_allNLI-dev_dot_precision": 0.3405511811023622, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.685546875, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.875195503234863, |
|
"eval_allNLI-dev_euclidean_ap": 0.4551932697343415, |
|
"eval_allNLI-dev_euclidean_f1": 0.5257903494176372, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 13.960739135742188, |
|
"eval_allNLI-dev_euclidean_precision": 0.3691588785046729, |
|
"eval_allNLI-dev_euclidean_recall": 0.9132947976878613, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.681640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 96.69795227050781, |
|
"eval_allNLI-dev_manhattan_ap": 0.4537023325460285, |
|
"eval_allNLI-dev_manhattan_f1": 0.5204081632653061, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 222.21884155273438, |
|
"eval_allNLI-dev_manhattan_precision": 0.3686746987951807, |
|
"eval_allNLI-dev_manhattan_recall": 0.884393063583815, |
|
"eval_allNLI-dev_max_accuracy": 0.6875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 772.0188598632812, |
|
"eval_allNLI-dev_max_ap": 0.45960393014401535, |
|
"eval_allNLI-dev_max_f1": 0.5257903494176372, |
|
"eval_allNLI-dev_max_f1_threshold": 444.7850646972656, |
|
"eval_allNLI-dev_max_precision": 0.3691588785046729, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.6200633454752746, |
|
"eval_sts-test_pearson_cosine": 0.24924277969472153, |
|
"eval_sts-test_pearson_dot": 0.20632094538884468, |
|
"eval_sts-test_pearson_euclidean": 0.2775041116953867, |
|
"eval_sts-test_pearson_manhattan": 0.29073367102351505, |
|
"eval_sts-test_pearson_max": 0.29073367102351505, |
|
"eval_sts-test_spearman_cosine": 0.3391141842675074, |
|
"eval_sts-test_spearman_dot": 0.21011509320588695, |
|
"eval_sts-test_spearman_euclidean": 0.31565870563443316, |
|
"eval_sts-test_spearman_manhattan": 0.3219084430440491, |
|
"eval_sts-test_spearman_max": 0.3391141842675074, |
|
"eval_vitaminc-pairs_loss": 3.716855764389038, |
|
"eval_vitaminc-pairs_runtime": 5.4756, |
|
"eval_vitaminc-pairs_samples_per_second": 23.377, |
|
"eval_vitaminc-pairs_steps_per_second": 0.365, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_negation-triplets_loss": 4.766229152679443, |
|
"eval_negation-triplets_runtime": 1.0705, |
|
"eval_negation-triplets_samples_per_second": 119.57, |
|
"eval_negation-triplets_steps_per_second": 1.868, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_scitail-pairs-pos_loss": 0.9834614992141724, |
|
"eval_scitail-pairs-pos_runtime": 1.2447, |
|
"eval_scitail-pairs-pos_samples_per_second": 102.839, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.607, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_scitail-pairs-qa_loss": 1.3808467388153076, |
|
"eval_scitail-pairs-qa_runtime": 0.7767, |
|
"eval_scitail-pairs-qa_samples_per_second": 164.794, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.575, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_xsum-pairs_loss": 3.506582260131836, |
|
"eval_xsum-pairs_runtime": 3.9348, |
|
"eval_xsum-pairs_samples_per_second": 32.53, |
|
"eval_xsum-pairs_steps_per_second": 0.508, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_sciq_pairs_loss": 0.38454076647758484, |
|
"eval_sciq_pairs_runtime": 5.8783, |
|
"eval_sciq_pairs_samples_per_second": 21.775, |
|
"eval_sciq_pairs_steps_per_second": 0.34, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_qasc_pairs_loss": 3.758909225463867, |
|
"eval_qasc_pairs_runtime": 0.9063, |
|
"eval_qasc_pairs_samples_per_second": 141.232, |
|
"eval_qasc_pairs_steps_per_second": 2.207, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_openbookqa_pairs_loss": 4.948967456817627, |
|
"eval_openbookqa_pairs_runtime": 0.8606, |
|
"eval_openbookqa_pairs_samples_per_second": 148.737, |
|
"eval_openbookqa_pairs_steps_per_second": 2.324, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_msmarco_pairs_loss": 4.785572528839111, |
|
"eval_msmarco_pairs_runtime": 1.9606, |
|
"eval_msmarco_pairs_samples_per_second": 65.286, |
|
"eval_msmarco_pairs_steps_per_second": 1.02, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_nq_pairs_loss": 4.675125598907471, |
|
"eval_nq_pairs_runtime": 3.3155, |
|
"eval_nq_pairs_samples_per_second": 38.607, |
|
"eval_nq_pairs_steps_per_second": 0.603, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_trivia_pairs_loss": 4.418967247009277, |
|
"eval_trivia_pairs_runtime": 4.478, |
|
"eval_trivia_pairs_samples_per_second": 28.584, |
|
"eval_trivia_pairs_steps_per_second": 0.447, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_gooaq_pairs_loss": 4.486730575561523, |
|
"eval_gooaq_pairs_runtime": 1.4616, |
|
"eval_gooaq_pairs_samples_per_second": 87.578, |
|
"eval_gooaq_pairs_steps_per_second": 1.368, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_paws-pos_loss": 0.4021705687046051, |
|
"eval_paws-pos_runtime": 1.0276, |
|
"eval_paws-pos_samples_per_second": 124.562, |
|
"eval_paws-pos_steps_per_second": 1.946, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1551590380139643, |
|
"eval_global_dataset_loss": 2.6410105228424072, |
|
"eval_global_dataset_runtime": 19.3933, |
|
"eval_global_dataset_samples_per_second": 21.451, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.15593483320403415, |
|
"grad_norm": 36.21416091918945, |
|
"learning_rate": 8.184488836662748e-06, |
|
"loss": 5.5704, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.15671062839410396, |
|
"grad_norm": 15.138063430786133, |
|
"learning_rate": 8.225616921269095e-06, |
|
"loss": 2.8612, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.15748642358417378, |
|
"grad_norm": 27.27367401123047, |
|
"learning_rate": 8.26674500587544e-06, |
|
"loss": 4.8846, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.1582622187742436, |
|
"grad_norm": 19.24480438232422, |
|
"learning_rate": 8.307873090481786e-06, |
|
"loss": 3.1182, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.1590380139643134, |
|
"grad_norm": 18.584644317626953, |
|
"learning_rate": 8.349001175088131e-06, |
|
"loss": 1.9789, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.15981380915438323, |
|
"grad_norm": 15.61733341217041, |
|
"learning_rate": 8.390129259694476e-06, |
|
"loss": 2.9743, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.16058960434445307, |
|
"grad_norm": 16.221357345581055, |
|
"learning_rate": 8.431257344300822e-06, |
|
"loss": 3.6543, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.1613653995345229, |
|
"grad_norm": 13.963678359985352, |
|
"learning_rate": 8.472385428907167e-06, |
|
"loss": 3.117, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.1621411947245927, |
|
"grad_norm": 18.764089584350586, |
|
"learning_rate": 8.513513513513514e-06, |
|
"loss": 1.5977, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.16291698991466252, |
|
"grad_norm": 29.377960205078125, |
|
"learning_rate": 8.554641598119857e-06, |
|
"loss": 5.1443, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.16369278510473234, |
|
"grad_norm": 13.549287796020508, |
|
"learning_rate": 8.595769682726204e-06, |
|
"loss": 0.668, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.16446858029480219, |
|
"grad_norm": 14.000697135925293, |
|
"learning_rate": 8.636897767332548e-06, |
|
"loss": 0.7522, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.165244375484872, |
|
"grad_norm": 18.982425689697266, |
|
"learning_rate": 8.678025851938895e-06, |
|
"loss": 2.3447, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.16602017067494182, |
|
"grad_norm": 13.098822593688965, |
|
"learning_rate": 8.71915393654524e-06, |
|
"loss": 0.5818, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.16679596586501164, |
|
"grad_norm": 17.622783660888672, |
|
"learning_rate": 8.760282021151586e-06, |
|
"loss": 3.556, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.16757176105508145, |
|
"grad_norm": 24.308530807495117, |
|
"learning_rate": 8.801410105757933e-06, |
|
"loss": 2.8425, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.16834755624515127, |
|
"grad_norm": 41.18635559082031, |
|
"learning_rate": 8.842538190364276e-06, |
|
"loss": 5.4947, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.16912335143522111, |
|
"grad_norm": 38.11179733276367, |
|
"learning_rate": 8.883666274970621e-06, |
|
"loss": 4.2956, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.16989914662529093, |
|
"grad_norm": 20.423124313354492, |
|
"learning_rate": 8.924794359576967e-06, |
|
"loss": 2.1325, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"grad_norm": 19.0192813873291, |
|
"learning_rate": 8.965922444183314e-06, |
|
"loss": 1.4286, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_Qnli-dev_cosine_accuracy": 0.625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.8608779907226562, |
|
"eval_Qnli-dev_cosine_ap": 0.6155996680907755, |
|
"eval_Qnli-dev_cosine_f1": 0.6555183946488294, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7760155200958252, |
|
"eval_Qnli-dev_cosine_precision": 0.5414364640883977, |
|
"eval_Qnli-dev_cosine_recall": 0.8305084745762712, |
|
"eval_Qnli-dev_dot_accuracy": 0.62109375, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 477.1441650390625, |
|
"eval_Qnli-dev_dot_ap": 0.5823923648803983, |
|
"eval_Qnli-dev_dot_f1": 0.6345029239766081, |
|
"eval_Qnli-dev_dot_f1_threshold": 358.5615234375, |
|
"eval_Qnli-dev_dot_precision": 0.484375, |
|
"eval_Qnli-dev_dot_recall": 0.9194915254237288, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.625, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 13.561055183410645, |
|
"eval_Qnli-dev_euclidean_ap": 0.6128136787518578, |
|
"eval_Qnli-dev_euclidean_f1": 0.6476510067114094, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 15.50450325012207, |
|
"eval_Qnli-dev_euclidean_precision": 0.5361111111111111, |
|
"eval_Qnli-dev_euclidean_recall": 0.8177966101694916, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.619140625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 215.1610565185547, |
|
"eval_Qnli-dev_manhattan_ap": 0.621124218438047, |
|
"eval_Qnli-dev_manhattan_f1": 0.645484949832776, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 273.8544921875, |
|
"eval_Qnli-dev_manhattan_precision": 0.5331491712707183, |
|
"eval_Qnli-dev_manhattan_recall": 0.8177966101694916, |
|
"eval_Qnli-dev_max_accuracy": 0.625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 477.1441650390625, |
|
"eval_Qnli-dev_max_ap": 0.621124218438047, |
|
"eval_Qnli-dev_max_f1": 0.6555183946488294, |
|
"eval_Qnli-dev_max_f1_threshold": 358.5615234375, |
|
"eval_Qnli-dev_max_precision": 0.5414364640883977, |
|
"eval_Qnli-dev_max_recall": 0.9194915254237288, |
|
"eval_allNLI-dev_cosine_accuracy": 0.685546875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9713066816329956, |
|
"eval_allNLI-dev_cosine_ap": 0.48223591780287633, |
|
"eval_allNLI-dev_cosine_f1": 0.5408805031446541, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8952139616012573, |
|
"eval_allNLI-dev_cosine_precision": 0.4243421052631579, |
|
"eval_allNLI-dev_cosine_recall": 0.7456647398843931, |
|
"eval_allNLI-dev_dot_accuracy": 0.677734375, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 741.37939453125, |
|
"eval_allNLI-dev_dot_ap": 0.3936517100476376, |
|
"eval_allNLI-dev_dot_f1": 0.5074626865671642, |
|
"eval_allNLI-dev_dot_f1_threshold": 416.87164306640625, |
|
"eval_allNLI-dev_dot_precision": 0.3420523138832998, |
|
"eval_allNLI-dev_dot_recall": 0.9826589595375722, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.685546875, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 6.748725891113281, |
|
"eval_allNLI-dev_euclidean_ap": 0.47719987976648454, |
|
"eval_allNLI-dev_euclidean_f1": 0.5415860735009671, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 12.946854591369629, |
|
"eval_allNLI-dev_euclidean_precision": 0.4069767441860465, |
|
"eval_allNLI-dev_euclidean_recall": 0.8092485549132948, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.685546875, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 110.15467834472656, |
|
"eval_allNLI-dev_manhattan_ap": 0.4846184039440549, |
|
"eval_allNLI-dev_manhattan_f1": 0.5335753176043557, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 234.3075408935547, |
|
"eval_allNLI-dev_manhattan_precision": 0.3888888888888889, |
|
"eval_allNLI-dev_manhattan_recall": 0.8497109826589595, |
|
"eval_allNLI-dev_max_accuracy": 0.685546875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 741.37939453125, |
|
"eval_allNLI-dev_max_ap": 0.4846184039440549, |
|
"eval_allNLI-dev_max_f1": 0.5415860735009671, |
|
"eval_allNLI-dev_max_f1_threshold": 416.87164306640625, |
|
"eval_allNLI-dev_max_precision": 0.4243421052631579, |
|
"eval_allNLI-dev_max_recall": 0.9826589595375722, |
|
"eval_sequential_score": 0.621124218438047, |
|
"eval_sts-test_pearson_cosine": 0.31919410971368356, |
|
"eval_sts-test_pearson_dot": 0.21556048485576235, |
|
"eval_sts-test_pearson_euclidean": 0.35723202726898373, |
|
"eval_sts-test_pearson_manhattan": 0.36765421659339287, |
|
"eval_sts-test_pearson_max": 0.36765421659339287, |
|
"eval_sts-test_spearman_cosine": 0.41124188237768894, |
|
"eval_sts-test_spearman_dot": 0.2248200891915325, |
|
"eval_sts-test_spearman_euclidean": 0.389006828577642, |
|
"eval_sts-test_spearman_manhattan": 0.39845426736998035, |
|
"eval_sts-test_spearman_max": 0.41124188237768894, |
|
"eval_vitaminc-pairs_loss": 4.022590160369873, |
|
"eval_vitaminc-pairs_runtime": 5.4704, |
|
"eval_vitaminc-pairs_samples_per_second": 23.399, |
|
"eval_vitaminc-pairs_steps_per_second": 0.366, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_negation-triplets_loss": 4.421084403991699, |
|
"eval_negation-triplets_runtime": 1.0619, |
|
"eval_negation-triplets_samples_per_second": 120.534, |
|
"eval_negation-triplets_steps_per_second": 1.883, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_scitail-pairs-pos_loss": 0.7828177809715271, |
|
"eval_scitail-pairs-pos_runtime": 1.2253, |
|
"eval_scitail-pairs-pos_samples_per_second": 104.468, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.632, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_scitail-pairs-qa_loss": 0.9572672843933105, |
|
"eval_scitail-pairs-qa_runtime": 0.7677, |
|
"eval_scitail-pairs-qa_samples_per_second": 166.739, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.605, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_xsum-pairs_loss": 3.0789806842803955, |
|
"eval_xsum-pairs_runtime": 3.9039, |
|
"eval_xsum-pairs_samples_per_second": 32.788, |
|
"eval_xsum-pairs_steps_per_second": 0.512, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_sciq_pairs_loss": 0.39149752259254456, |
|
"eval_sciq_pairs_runtime": 5.8117, |
|
"eval_sciq_pairs_samples_per_second": 22.024, |
|
"eval_sciq_pairs_steps_per_second": 0.344, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_qasc_pairs_loss": 3.374030828475952, |
|
"eval_qasc_pairs_runtime": 0.9033, |
|
"eval_qasc_pairs_samples_per_second": 141.696, |
|
"eval_qasc_pairs_steps_per_second": 2.214, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_openbookqa_pairs_loss": 5.161572456359863, |
|
"eval_openbookqa_pairs_runtime": 0.8619, |
|
"eval_openbookqa_pairs_samples_per_second": 148.506, |
|
"eval_openbookqa_pairs_steps_per_second": 2.32, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_msmarco_pairs_loss": 4.489230632781982, |
|
"eval_msmarco_pairs_runtime": 1.9668, |
|
"eval_msmarco_pairs_samples_per_second": 65.079, |
|
"eval_msmarco_pairs_steps_per_second": 1.017, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_nq_pairs_loss": 4.549116611480713, |
|
"eval_nq_pairs_runtime": 3.3346, |
|
"eval_nq_pairs_samples_per_second": 38.385, |
|
"eval_nq_pairs_steps_per_second": 0.6, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_trivia_pairs_loss": 5.130815029144287, |
|
"eval_trivia_pairs_runtime": 4.4818, |
|
"eval_trivia_pairs_samples_per_second": 28.56, |
|
"eval_trivia_pairs_steps_per_second": 0.446, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_gooaq_pairs_loss": 3.8106689453125, |
|
"eval_gooaq_pairs_runtime": 1.4609, |
|
"eval_gooaq_pairs_samples_per_second": 87.615, |
|
"eval_gooaq_pairs_steps_per_second": 1.369, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_paws-pos_loss": 0.1346072554588318, |
|
"eval_paws-pos_runtime": 1.0308, |
|
"eval_paws-pos_samples_per_second": 124.178, |
|
"eval_paws-pos_steps_per_second": 1.94, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17067494181536075, |
|
"eval_global_dataset_loss": 2.3529915809631348, |
|
"eval_global_dataset_runtime": 19.4389, |
|
"eval_global_dataset_samples_per_second": 21.4, |
|
"eval_global_dataset_steps_per_second": 0.36, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17145073700543056, |
|
"grad_norm": 24.33696174621582, |
|
"learning_rate": 9.007050528789659e-06, |
|
"loss": 2.2834, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.17222653219550038, |
|
"grad_norm": 19.657947540283203, |
|
"learning_rate": 9.048178613396003e-06, |
|
"loss": 3.4539, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.1730023273855702, |
|
"grad_norm": 22.255239486694336, |
|
"learning_rate": 9.08930669800235e-06, |
|
"loss": 2.7994, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.17377812257564004, |
|
"grad_norm": 20.619884490966797, |
|
"learning_rate": 9.130434782608695e-06, |
|
"loss": 2.463, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.17455391776570986, |
|
"grad_norm": 22.244462966918945, |
|
"learning_rate": 9.17156286721504e-06, |
|
"loss": 4.1749, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.17532971295577968, |
|
"grad_norm": 25.1329402923584, |
|
"learning_rate": 9.212690951821385e-06, |
|
"loss": 3.7001, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.1761055081458495, |
|
"grad_norm": 35.8182258605957, |
|
"learning_rate": 9.253819036427732e-06, |
|
"loss": 5.6003, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.1768813033359193, |
|
"grad_norm": 33.197845458984375, |
|
"learning_rate": 9.294947121034076e-06, |
|
"loss": 5.5236, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.17765709852598913, |
|
"grad_norm": 14.53373908996582, |
|
"learning_rate": 9.336075205640421e-06, |
|
"loss": 2.8411, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.17843289371605897, |
|
"grad_norm": 8.925052642822266, |
|
"learning_rate": 9.377203290246768e-06, |
|
"loss": 0.3611, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.1792086889061288, |
|
"grad_norm": 19.080123901367188, |
|
"learning_rate": 9.418331374853114e-06, |
|
"loss": 3.4145, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.1799844840961986, |
|
"grad_norm": 16.67955780029297, |
|
"learning_rate": 9.459459459459459e-06, |
|
"loss": 2.7527, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.18076027928626842, |
|
"grad_norm": 13.526026725769043, |
|
"learning_rate": 9.500587544065804e-06, |
|
"loss": 1.977, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.18153607447633824, |
|
"grad_norm": 12.823989868164062, |
|
"learning_rate": 9.54171562867215e-06, |
|
"loss": 1.9694, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.18231186966640806, |
|
"grad_norm": 14.984912872314453, |
|
"learning_rate": 9.582843713278495e-06, |
|
"loss": 2.4804, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.1830876648564779, |
|
"grad_norm": 13.724047660827637, |
|
"learning_rate": 9.62397179788484e-06, |
|
"loss": 2.7869, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.18386346004654772, |
|
"grad_norm": 14.626879692077637, |
|
"learning_rate": 9.665099882491187e-06, |
|
"loss": 2.0562, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.18463925523661753, |
|
"grad_norm": 8.06078815460205, |
|
"learning_rate": 9.706227967097532e-06, |
|
"loss": 0.3609, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.18541505042668735, |
|
"grad_norm": 13.897974014282227, |
|
"learning_rate": 9.747356051703878e-06, |
|
"loss": 1.9205, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"grad_norm": 6.743955612182617, |
|
"learning_rate": 9.788484136310221e-06, |
|
"loss": 0.2964, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_Qnli-dev_cosine_accuracy": 0.62890625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.8693021535873413, |
|
"eval_Qnli-dev_cosine_ap": 0.611781004288293, |
|
"eval_Qnli-dev_cosine_f1": 0.6366559485530547, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7878068685531616, |
|
"eval_Qnli-dev_cosine_precision": 0.5129533678756477, |
|
"eval_Qnli-dev_cosine_recall": 0.8389830508474576, |
|
"eval_Qnli-dev_dot_accuracy": 0.595703125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 521.8904418945312, |
|
"eval_Qnli-dev_dot_ap": 0.5496339935508336, |
|
"eval_Qnli-dev_dot_f1": 0.6397694524495677, |
|
"eval_Qnli-dev_dot_f1_threshold": 375.82940673828125, |
|
"eval_Qnli-dev_dot_precision": 0.4847161572052402, |
|
"eval_Qnli-dev_dot_recall": 0.940677966101695, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.626953125, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 11.786832809448242, |
|
"eval_Qnli-dev_euclidean_ap": 0.616692733684346, |
|
"eval_Qnli-dev_euclidean_f1": 0.6368159203980099, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 15.11730670928955, |
|
"eval_Qnli-dev_euclidean_precision": 0.5231607629427792, |
|
"eval_Qnli-dev_euclidean_recall": 0.8135593220338984, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.6171875, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 198.48147583007812, |
|
"eval_Qnli-dev_manhattan_ap": 0.6210006336689851, |
|
"eval_Qnli-dev_manhattan_f1": 0.6374367622259697, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 262.7891845703125, |
|
"eval_Qnli-dev_manhattan_precision": 0.5294117647058824, |
|
"eval_Qnli-dev_manhattan_recall": 0.8008474576271186, |
|
"eval_Qnli-dev_max_accuracy": 0.62890625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 521.8904418945312, |
|
"eval_Qnli-dev_max_ap": 0.6210006336689851, |
|
"eval_Qnli-dev_max_f1": 0.6397694524495677, |
|
"eval_Qnli-dev_max_f1_threshold": 375.82940673828125, |
|
"eval_Qnli-dev_max_precision": 0.5294117647058824, |
|
"eval_Qnli-dev_max_recall": 0.940677966101695, |
|
"eval_allNLI-dev_cosine_accuracy": 0.6875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9714217782020569, |
|
"eval_allNLI-dev_cosine_ap": 0.5054463009748529, |
|
"eval_allNLI-dev_cosine_f1": 0.5534591194968553, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.891494631767273, |
|
"eval_allNLI-dev_cosine_precision": 0.4342105263157895, |
|
"eval_allNLI-dev_cosine_recall": 0.7630057803468208, |
|
"eval_allNLI-dev_dot_accuracy": 0.6796875, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 697.1558837890625, |
|
"eval_allNLI-dev_dot_ap": 0.40155312352801564, |
|
"eval_allNLI-dev_dot_f1": 0.5088235294117648, |
|
"eval_allNLI-dev_dot_f1_threshold": 376.5243225097656, |
|
"eval_allNLI-dev_dot_precision": 0.34122287968441817, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.685546875, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 6.3458075523376465, |
|
"eval_allNLI-dev_euclidean_ap": 0.5028194115939991, |
|
"eval_allNLI-dev_euclidean_f1": 0.5518590998043053, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 12.797416687011719, |
|
"eval_allNLI-dev_euclidean_precision": 0.4171597633136095, |
|
"eval_allNLI-dev_euclidean_recall": 0.815028901734104, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.689453125, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 145.72430419921875, |
|
"eval_allNLI-dev_manhattan_ap": 0.5074540422577104, |
|
"eval_allNLI-dev_manhattan_f1": 0.5382932166301969, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 210.015625, |
|
"eval_allNLI-dev_manhattan_precision": 0.43309859154929575, |
|
"eval_allNLI-dev_manhattan_recall": 0.7109826589595376, |
|
"eval_allNLI-dev_max_accuracy": 0.689453125, |
|
"eval_allNLI-dev_max_accuracy_threshold": 697.1558837890625, |
|
"eval_allNLI-dev_max_ap": 0.5074540422577104, |
|
"eval_allNLI-dev_max_f1": 0.5534591194968553, |
|
"eval_allNLI-dev_max_f1_threshold": 376.5243225097656, |
|
"eval_allNLI-dev_max_precision": 0.4342105263157895, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.6210006336689851, |
|
"eval_sts-test_pearson_cosine": 0.437261385872042, |
|
"eval_sts-test_pearson_dot": 0.27944802216402886, |
|
"eval_sts-test_pearson_euclidean": 0.46065093061659046, |
|
"eval_sts-test_pearson_manhattan": 0.4698077558920942, |
|
"eval_sts-test_pearson_max": 0.4698077558920942, |
|
"eval_sts-test_spearman_cosine": 0.5068103175719304, |
|
"eval_sts-test_spearman_dot": 0.28160415924837434, |
|
"eval_sts-test_spearman_euclidean": 0.4813224156515044, |
|
"eval_sts-test_spearman_manhattan": 0.4888016492165877, |
|
"eval_sts-test_spearman_max": 0.5068103175719304, |
|
"eval_vitaminc-pairs_loss": 3.8961689472198486, |
|
"eval_vitaminc-pairs_runtime": 5.4898, |
|
"eval_vitaminc-pairs_samples_per_second": 23.316, |
|
"eval_vitaminc-pairs_steps_per_second": 0.364, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_negation-triplets_loss": 3.9956982135772705, |
|
"eval_negation-triplets_runtime": 1.0718, |
|
"eval_negation-triplets_samples_per_second": 119.43, |
|
"eval_negation-triplets_steps_per_second": 1.866, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_scitail-pairs-pos_loss": 0.6250349283218384, |
|
"eval_scitail-pairs-pos_runtime": 1.2283, |
|
"eval_scitail-pairs-pos_samples_per_second": 104.211, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.628, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_scitail-pairs-qa_loss": 0.8983888030052185, |
|
"eval_scitail-pairs-qa_runtime": 0.7728, |
|
"eval_scitail-pairs-qa_samples_per_second": 165.636, |
|
"eval_scitail-pairs-qa_steps_per_second": 2.588, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_xsum-pairs_loss": 2.681138038635254, |
|
"eval_xsum-pairs_runtime": 3.921, |
|
"eval_xsum-pairs_samples_per_second": 32.645, |
|
"eval_xsum-pairs_steps_per_second": 0.51, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_sciq_pairs_loss": 0.3189742863178253, |
|
"eval_sciq_pairs_runtime": 5.8916, |
|
"eval_sciq_pairs_samples_per_second": 21.726, |
|
"eval_sciq_pairs_steps_per_second": 0.339, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_qasc_pairs_loss": 2.76663875579834, |
|
"eval_qasc_pairs_runtime": 0.9119, |
|
"eval_qasc_pairs_samples_per_second": 140.374, |
|
"eval_qasc_pairs_steps_per_second": 2.193, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_openbookqa_pairs_loss": 4.003782749176025, |
|
"eval_openbookqa_pairs_runtime": 0.8645, |
|
"eval_openbookqa_pairs_samples_per_second": 148.058, |
|
"eval_openbookqa_pairs_steps_per_second": 2.313, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_msmarco_pairs_loss": 3.789357900619507, |
|
"eval_msmarco_pairs_runtime": 1.9837, |
|
"eval_msmarco_pairs_samples_per_second": 64.525, |
|
"eval_msmarco_pairs_steps_per_second": 1.008, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_nq_pairs_loss": 3.925625801086426, |
|
"eval_nq_pairs_runtime": 3.3218, |
|
"eval_nq_pairs_samples_per_second": 38.534, |
|
"eval_nq_pairs_steps_per_second": 0.602, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_trivia_pairs_loss": 3.9500904083251953, |
|
"eval_trivia_pairs_runtime": 4.4861, |
|
"eval_trivia_pairs_samples_per_second": 28.533, |
|
"eval_trivia_pairs_steps_per_second": 0.446, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_gooaq_pairs_loss": 3.2876720428466797, |
|
"eval_gooaq_pairs_runtime": 1.4821, |
|
"eval_gooaq_pairs_samples_per_second": 86.362, |
|
"eval_gooaq_pairs_steps_per_second": 1.349, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_paws-pos_loss": 0.12235681712627411, |
|
"eval_paws-pos_runtime": 1.0363, |
|
"eval_paws-pos_samples_per_second": 123.516, |
|
"eval_paws-pos_steps_per_second": 1.93, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.18619084561675717, |
|
"eval_global_dataset_loss": 1.9579764604568481, |
|
"eval_global_dataset_runtime": 19.4, |
|
"eval_global_dataset_samples_per_second": 21.443, |
|
"eval_global_dataset_steps_per_second": 0.361, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.186966640806827, |
|
"grad_norm": 17.613515853881836, |
|
"learning_rate": 9.829612220916568e-06, |
|
"loss": 3.2402, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.18774243599689683, |
|
"grad_norm": 19.704069137573242, |
|
"learning_rate": 9.870740305522913e-06, |
|
"loss": 3.1076, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.18851823118696664, |
|
"grad_norm": 14.683489799499512, |
|
"learning_rate": 9.911868390129259e-06, |
|
"loss": 1.8656, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.18929402637703646, |
|
"grad_norm": 15.879508018493652, |
|
"learning_rate": 9.952996474735606e-06, |
|
"loss": 2.7912, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.19006982156710628, |
|
"grad_norm": 5.585937023162842, |
|
"learning_rate": 9.99412455934195e-06, |
|
"loss": 0.2569, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1908456167571761, |
|
"grad_norm": 18.365917205810547, |
|
"learning_rate": 1.0035252643948295e-05, |
|
"loss": 2.1095, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.19162141194724594, |
|
"grad_norm": 14.385225296020508, |
|
"learning_rate": 1.007638072855464e-05, |
|
"loss": 1.5261, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.19239720713731576, |
|
"grad_norm": 23.97905921936035, |
|
"learning_rate": 1.0117508813160987e-05, |
|
"loss": 3.4551, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.19317300232738557, |
|
"grad_norm": 15.02582836151123, |
|
"learning_rate": 1.0158636897767332e-05, |
|
"loss": 2.2465, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.1939487975174554, |
|
"grad_norm": 32.1656494140625, |
|
"learning_rate": 1.0199764982373677e-05, |
|
"loss": 4.7511, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1947245927075252, |
|
"grad_norm": 22.766891479492188, |
|
"learning_rate": 1.0240893066980021e-05, |
|
"loss": 2.9648, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.19550038789759502, |
|
"grad_norm": 22.01688575744629, |
|
"learning_rate": 1.0282021151586368e-05, |
|
"loss": 3.4702, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.19627618308766487, |
|
"grad_norm": 19.137022018432617, |
|
"learning_rate": 1.0323149236192713e-05, |
|
"loss": 2.2448, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.19705197827773469, |
|
"grad_norm": 26.455888748168945, |
|
"learning_rate": 1.0364277320799059e-05, |
|
"loss": 4.5872, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.1978277734678045, |
|
"grad_norm": 15.777259826660156, |
|
"learning_rate": 1.0405405405405406e-05, |
|
"loss": 1.8122, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.19860356865787432, |
|
"grad_norm": 17.67413902282715, |
|
"learning_rate": 1.0446533490011751e-05, |
|
"loss": 2.8551, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.19937936384794414, |
|
"grad_norm": 17.045608520507812, |
|
"learning_rate": 1.0487661574618094e-05, |
|
"loss": 2.6086, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.20015515903801395, |
|
"grad_norm": 14.350496292114258, |
|
"learning_rate": 1.052878965922444e-05, |
|
"loss": 1.232, |
|
"step": 258 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2578, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 258, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 96, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|