{
"checkpoint_path": "/data_2to/devel_data/nn_pruning/output/squad_test_final_fine_tune/fine_tuned_aws_nn-pruning-v10-a32-l5-dl0-5--2021-01-21--00-52-45/checkpoint-22132",
"config": {
"_name_or_path": "/tmp/tmp3c96kd08",
"architectures": ["BertForQuestionAnswering"],
"attention_probs_dropout_prob": 0.1,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"pruned_heads": {
"0": [2, 4, 5, 6, 7],
"1": [0, 2, 3, 5, 6, 7, 8],
"10": [1, 4, 5, 6, 7],
"11": [0, 2, 5, 6, 7, 8, 11],
"2": [4, 7, 8],
"3": [2, 4, 6],
"4": [2],
"5": [1, 2],
"6": [2, 3, 7],
"7": [3, 6, 7, 11],
"8": [0, 4],
"9": [1, 4, 5, 7, 9, 10]
},
"type_vocab_size": 2,
"vocab_size": 30522
},
"eval_metrics": {
"exact_match": 81.69347209082308,
"f1": 88.72194531479171,
"main_metric": 88.72194531479171
},
"model_args": {
"cache_dir": null,
"config_name": null,
"model_name_or_path": "/home/lagunas/devel/hf/nn_pruning/nn_pruning/analysis/tmp_finetune",
"tokenizer_name": null
},
"source_checkpoint": "/data_2to/devel_data/nn_pruning/output/squad_test_aws/aws_nn-pruning-v10-a32-l5-dl0-5--2021-01-21--00-52-45/checkpoint-95000",
"sparse_args": {
"ampere_pruning_method": "disabled",
"attention_block_cols": 1,
"attention_block_rows": 1,
"attention_lambda": 1.0,
"attention_output_with_dense": 0,
"attention_pruning_method": "topK",
"bias_mask": true,
"dense_block_cols": 1,
"dense_block_rows": 1,
"dense_lambda": 1.0,
"dense_pruning_method": "topK",
"distil_alpha_ce": 0.1,
"distil_alpha_teacher": 0.9,
"distil_teacher_name_or_path": "csarron/bert-base-uncased-squad-v1",
"distil_temperature": 2.0,
"final_ampere_temperature": 20.0,
"final_finetune": 1,
"final_threshold": 0.5,
"final_warmup": 0,
"initial_ampere_temperature": 0.0,
"initial_threshold": 1.0,
"initial_warmup": 0,
"mask_init": "constant",
"mask_scale": 0.0,
"mask_scores_learning_rate": 0.01,
"regularization": "",
"regularization_final_lambda": 0
},
"speed": {
"cuda_eval_elapsed_time": 20.951393741607667,
"eval_elapsed_time": 28.213609586004168
},
"speedup": 1.8420919143305463,
"stats": {
"layers": {
"0": {
"linear_attention_nnz": 1376256,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1125888,
"linear_dense_total": 4718592,
"linear_nnz": 2502144,
"linear_total": 7077888,
"nnz": 2508829,
"total": 7086912
},
"1": {
"linear_attention_nnz": 983040,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1285632,
"linear_dense_total": 4718592,
"linear_nnz": 2268672,
"linear_total": 7077888,
"nnz": 2275077,
"total": 7086528
},
"10": {
"linear_attention_nnz": 1376256,
"linear_attention_total": 2359296,
"linear_dense_nnz": 168960,
"linear_dense_total": 4718592,
"linear_nnz": 1545216,
"linear_total": 7077888,
"nnz": 1551278,
"total": 7086912
},
"11": {
"linear_attention_nnz": 983040,
"linear_attention_total": 2359296,
"linear_dense_nnz": 485376,
"linear_dense_total": 4718592,
"linear_nnz": 1468416,
"linear_total": 7077888,
"nnz": 1474300,
"total": 7086528
},
"2": {
"linear_attention_nnz": 1769472,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1523712,
"linear_dense_total": 4718592,
"linear_nnz": 3293184,
"linear_total": 7077888,
"nnz": 3300512,
"total": 7087296
},
"3": {
"linear_attention_nnz": 1769472,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1555968,
"linear_dense_total": 4718592,
"linear_nnz": 3325440,
"linear_total": 7077888,
"nnz": 3332789,
"total": 7087296
},
"4": {
"linear_attention_nnz": 2162688,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1617408,
"linear_dense_total": 4718592,
"linear_nnz": 3780096,
"linear_total": 7077888,
"nnz": 3787869,
"total": 7087680
},
"5": {
"linear_attention_nnz": 1966080,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1514496,
"linear_dense_total": 4718592,
"linear_nnz": 3480576,
"linear_total": 7077888,
"nnz": 3488090,
"total": 7087488
},
"6": {
"linear_attention_nnz": 1769472,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1135104,
"linear_dense_total": 4718592,
"linear_nnz": 2904576,
"linear_total": 7077888,
"nnz": 2911651,
"total": 7087296
},
"7": {
"linear_attention_nnz": 1572864,
"linear_attention_total": 2359296,
"linear_dense_nnz": 847872,
"linear_dense_total": 4718592,
"linear_nnz": 2420736,
"linear_total": 7077888,
"nnz": 2427432,
"total": 7087104
},
"8": {
"linear_attention_nnz": 1966080,
"linear_attention_total": 2359296,
"linear_dense_nnz": 474624,
"linear_dense_total": 4718592,
"linear_nnz": 2440704,
"linear_total": 7077888,
"nnz": 2447541,
"total": 7087488
},
"9": {
"linear_attention_nnz": 1179648,
"linear_attention_total": 2359296,
"linear_dense_nnz": 208896,
"linear_dense_total": 4718592,
"linear_nnz": 1388544,
"linear_total": 7077888,
"nnz": 1394440,
"total": 7086720
}
},
"linear_nnz": 30818304,
"linear_sparsity": 63.71527777777778,
"linear_total": 84934656,
"nnz": 54738530,
"pruned_heads": {
"0": [2, 4, 5, 6, 7],
"1": [0, 2, 3, 5, 6, 7, 8],
"10": [1, 4, 5, 6, 7],
"11": [0, 2, 5, 6, 7, 8, 11],
"2": [8, 4, 7],
"3": [2, 4, 6],
"4": [2],
"5": [1, 2],
"6": [2, 3, 7],
"7": [11, 3, 6, 7],
"8": [0, 4],
"9": [1, 4, 5, 7, 9, 10]
},
"total": 108883970,
"total_sparsity": 49.72765045212808
},
"training_args": {
"_n_gpu": -1,
"adafactor": false,
"adam_beta1": 0.9,
"adam_beta2": 0.999,
"adam_epsilon": 1e-08,
"dataloader_drop_last": false,
"dataloader_num_workers": 0,
"dataloader_pin_memory": true,
"ddp_find_unused_parameters": null,
"debug": false,
"deepspeed": null,
"disable_tqdm": false,
"do_eval": 1,
"do_predict": false,
"do_train": 1,
"eval_accumulation_steps": null,
"eval_steps": 2500,
"evaluation_strategy": "steps",
"fp16": false,
"fp16_backend": "auto",
"fp16_full_eval": false,
"fp16_opt_level": "O1",
"gradient_accumulation_steps": 1,
"greater_is_better": null,
"group_by_length": false,
"ignore_data_skip": false,
"label_names": null,
"label_smoothing_factor": 0.0,
"learning_rate": 3e-05,
"length_column_name": "length",
"load_best_model_at_end": false,
"local_rank": -1,
"logging_dir": "/data_2to/devel_data/nn_pruning/output/squad_test_final_fine_tune/fine_tuned_aws_nn-pruning-v10-a32-l5-dl0-5--2021-01-21--00-52-45",
"logging_first_step": false,
"logging_steps": 250,
"logging_strategy": "steps",
"lr_scheduler_type": "linear",
"max_grad_norm": 1.0,
"max_steps": -1,
"metric_for_best_model": null,
"mp_parameters": "",
"no_cuda": false,
"num_train_epochs": 4,
"optimize_model_before_eval": "disabled",
"output_dir": "/data_2to/devel_data/nn_pruning/output/squad_test_final_fine_tune/fine_tuned_aws_nn-pruning-v10-a32-l5-dl0-5--2021-01-21--00-52-45",
"overwrite_output_dir": 1,
"past_index": -1,
"per_device_eval_batch_size": 128,
"per_device_train_batch_size": 16,
"per_gpu_eval_batch_size": null,
"per_gpu_train_batch_size": null,
"prediction_loss_only": false,
"remove_unused_columns": true,
"report_to": null,
"run_name": "/data_2to/devel_data/nn_pruning/output/squad_test_final_fine_tune/fine_tuned_aws_nn-pruning-v10-a32-l5-dl0-5--2021-01-21--00-52-45",
"save_steps": 2500,
"save_strategy": "steps",
"save_total_limit": 50,
"seed": 17,
"sharded_ddp": "",
"skip_memory_metrics": false,
"tpu_metrics_debug": false,
"tpu_num_cores": null,
"warmup_ratio": 0.0,
"warmup_steps": 10,
"weight_decay": 0.0
}
}