2025-04-24 10:12:23,878 INFO [train.py:653] {
  "allowed_excess_duration_ratio": 0.1,
  "audio_key": "question_audio",
  "batch_idx_train": 0,
  "best_train_epoch": -1,
  "best_train_loss": Infinity,
  "best_valid_epoch": -1,
  "best_valid_loss": Infinity,
  "bucketing_sampler": true,
  "deepscale": false,
  "deepscale_config": null,
  "deepspeed": true,
  "deepspeed_config": "./slam_omni/ds_config_zero1.json",
  "drop_last": true,
  "enable_musan": false,
  "enable_spec_aug": true,
  "enable_speech_output": true,
  "encoder_projector_ds_rate": 8,
  "env_info": {
    "IP address": "0.114.172.122",
    "hostname": "7515258",
    "icefall-git-branch": "master",
    "icefall-git-date": "Thu Apr 24 08:24:11 2025",
    "icefall-git-sha1": "2e9be467-dirty",
    "icefall-path": "/workspace/slam/icefall_omni",
    "k2-build-type": "Release",
    "k2-git-date": "Tue Oct 29 09:02:19 2024",
    "k2-git-sha1": "75e2ed6b2fd87c22b7f3f34bad48a69984bb8755",
    "k2-path": "/opt/conda/lib/python3.11/site-packages/k2/__init__.py",
    "k2-version": "1.24.4",
    "k2-with-cuda": true,
    "lhotse-path": "/workspace/slam/lhotse/lhotse/__init__.py",
    "lhotse-version": "1.30.0.dev+git.13c7616f.dirty",
    "python-version": "3.11",
    "torch-cuda-available": true,
    "torch-cuda-version": "12.4",
    "torch-version": "2.4.0"
  },
  "exp_dir": "slam_omni/exp_speech2speech_rerun",
  "frame_shift_ms": 10,
  "huggingface_dataset_path_or_name": "/workspace/Belle_1.4M-SLAM-Omni",
  "input_strategy": "PrecomputedFeatures",
  "llm_path_or_name": "models/Qwen2.5-0.5B-Instruct",
  "log_interval": 50,
  "manifest_dir": "data/fbank",
  "max_duration": 40,
  "num_buckets": 30,
  "num_epochs": 10,
  "num_workers": 2,
  "on_the_fly_feats": false,
  "pretrained_model_path": null,
  "resample_to_16kHz": true,
  "reset_interval": 200,
  "return_cuts": true,
  "sampler_state_dict_path": null,
  "seed": 42,
  "shuffle": true,
  "spec_aug_time_warp_factor": 80,
  "speech_encoder_path_or_name": "models/whisper/v1.1/whisper-large-v2-multi-hans-zh-epoch-3-avg-10.pt",
  "start_epoch": 1,
  "subsampling_factor": 2,
  "tensorboard": true,
  "text_key": "answer",
  "unfreeze_llm": true,
  "unfreeze_speech_projector": true,
  "use_flash_attn": true,
  "use_fp16": true,
  "use_lora": true,
  "valid_interval": 5000
}
2025-04-24 10:12:23,878 INFO [train.py:655] About to create model
2025-04-24 10:12:44,166 INFO [train.py:808] Number of model parameters: 1326848644
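The parameter listing that follows is what Hugging Face peft emits for rank-64 LoRA adapters attached to every attention and MLP projection of the Qwen2.5-0.5B LLM named in the config above ("use_lora": true). A minimal sketch of a setup that reproduces those tensor names and shapes; the rank and target-module list are read off the logged torch.Size values, while lora_alpha and dropout are placeholders the log does not reveal:

    # Sketch: attach LoRA adapters matching the shapes logged below.
    # Assumes Hugging Face `transformers` + `peft`; r=64 and target_modules
    # are inferred from the listing, lora_alpha is a guess.
    from peft import LoraConfig, get_peft_model
    from transformers import AutoModelForCausalLM

    llm = AutoModelForCausalLM.from_pretrained("models/Qwen2.5-0.5B-Instruct")
    lora_cfg = LoraConfig(
        r=64,          # lora_A: [64, in_features], lora_B: [out_features, 64]
        lora_alpha=16, # assumption: not visible in this log
        target_modules=["q_proj", "k_proj", "v_proj", "o_proj",
                        "gate_proj", "up_proj", "down_proj"],
        task_type="CAUSAL_LM",
    )
    llm = get_peft_model(llm, lora_cfg)
    # Parameter names then take the logged form, e.g.
    # base_model.model.model.layers.0.self_attn.q_proj.lora_A.default.weight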
2025-04-24 10:12:44,166 INFO [train.py:810] Trainable parameters (excluding model.eval modules):
2025-04-24 10:12:44,167 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.q_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 10:12:44,167 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.q_proj.lora_B.default.weight: torch.Size([896, 64])
2025-04-24 10:12:44,167 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.k_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 10:12:44,167 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.k_proj.lora_B.default.weight: torch.Size([128, 64])
2025-04-24 10:12:44,167 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.v_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 10:12:44,167 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.v_proj.lora_B.default.weight: torch.Size([128, 64])
2025-04-24 10:12:44,168 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.o_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 10:12:44,168 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.o_proj.lora_B.default.weight: torch.Size([896, 64])
2025-04-24 10:12:44,168 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.gate_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 10:12:44,168 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.gate_proj.lora_B.default.weight: torch.Size([4864, 64])
2025-04-24 10:12:44,168 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.up_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 10:12:44,168 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.up_proj.lora_B.default.weight: torch.Size([4864, 64])
2025-04-24 10:12:44,168 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.down_proj.lora_A.default.weight: torch.Size([64, 4864])
2025-04-24 10:12:44,168 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.down_proj.lora_B.default.weight: torch.Size([896, 64])
[... the same 14 LoRA tensors are logged, with identical shapes, for each of llm layers 1-23 (timestamps 10:12:44,168 through 10:12:44,185) ...]
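Within each layer, q_proj and o_proj get lora_B of [896, 64] while k_proj and v_proj get [128, 64]: Qwen2.5-0.5B uses grouped-query attention, so queries span the full 896-dim hidden size (14 heads x 64) but keys and values cover only 2 KV heads x 64 = 128. A quick check, assuming the public Qwen/Qwen2.5-0.5B-Instruct config matches the local models/Qwen2.5-0.5B-Instruct checkpoint:

    # Sketch: confirm the projection widths behind the logged LoRA shapes.
    from transformers import AutoConfig

    cfg = AutoConfig.from_pretrained("Qwen/Qwen2.5-0.5B-Instruct")
    head_dim = cfg.hidden_size // cfg.num_attention_heads  # 896 // 14 = 64
    print(cfg.hidden_size)                    # 896  -> q_proj/o_proj width
    print(cfg.num_key_value_heads * head_dim) # 2 * 64 = 128 -> k/v width
    print(cfg.intermediate_size)              # 4864 -> gate/up width
    print(cfg.num_hidden_layers)              # 24   -> layers 0-23 in the listing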
2025-04-24 10:12:44,185 INFO [train.py:813] encoder_projector.linear1.weight: torch.Size([896, 10240])
2025-04-24 10:12:44,185 INFO [train.py:813] encoder_projector.linear1.bias: torch.Size([896])
2025-04-24 10:12:44,185 INFO [train.py:813] encoder_projector.linear2.weight: torch.Size([896, 896])
2025-04-24 10:12:44,185 INFO [train.py:813] encoder_projector.linear2.bias: torch.Size([896])
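linear1 consumes 10240 = 1280 x 8 features: the Whisper large-v2 encoder emits 1280-dim frames, and "encoder_projector_ds_rate": 8 stacks eight consecutive frames before projecting into the LLM's 896-dim embedding space. A sketch of a module with exactly these parameter shapes, assuming the common stack-then-MLP projector design; the actual icefall implementation may differ in detail:

    import torch
    import torch.nn as nn

    class EncoderProjector(nn.Module):
        """Stack k consecutive encoder frames, then apply a 2-layer MLP.

        With encoder_dim=1280 (Whisper large-v2), k=8, llm_dim=896 this
        reproduces the logged shapes: linear1.weight [896, 10240],
        linear2.weight [896, 896].
        """
        def __init__(self, encoder_dim=1280, llm_dim=896, k=8):
            super().__init__()
            self.k = k
            self.linear1 = nn.Linear(encoder_dim * k, llm_dim)
            self.relu = nn.ReLU()
            self.linear2 = nn.Linear(llm_dim, llm_dim)

        def forward(self, x):        # x: (batch, T, encoder_dim)
            b, t, d = x.shape
            t = t - t % self.k       # drop frames that don't fill a group of k
            x = x[:, :t, :].reshape(b, t // self.k, d * self.k)
            return self.linear2(self.relu(self.linear1(x)))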
2025-04-24 10:12:44,185 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.self_attn.q_proj.lora_A.default.weight: torch.Size([64, 1024])
2025-04-24 10:12:44,185 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.self_attn.q_proj.lora_B.default.weight: torch.Size([1024, 64])
2025-04-24 10:12:44,185 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.self_attn.k_proj.lora_A.default.weight: torch.Size([64, 1024])
2025-04-24 10:12:44,185 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.self_attn.k_proj.lora_B.default.weight: torch.Size([1024, 64])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.self_attn.v_proj.lora_A.default.weight: torch.Size([64, 1024])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.self_attn.v_proj.lora_B.default.weight: torch.Size([1024, 64])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.self_attn.o_proj.lora_A.default.weight: torch.Size([64, 1024])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.self_attn.o_proj.lora_B.default.weight: torch.Size([1024, 64])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.mlp.gate_proj.lora_A.default.weight: torch.Size([64, 1024])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.mlp.gate_proj.lora_B.default.weight: torch.Size([2048, 64])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.mlp.up_proj.lora_A.default.weight: torch.Size([64, 1024])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.mlp.up_proj.lora_B.default.weight: torch.Size([2048, 64])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.mlp.down_proj.lora_A.default.weight: torch.Size([64, 2048])
2025-04-24 10:12:44,186 INFO [train.py:813] codec_lm.base_model.model.model.layers.0.mlp.down_proj.lora_B.default.weight: torch.Size([1024, 64])
[... the same 14 LoRA tensors are logged, with identical shapes, for codec_lm layers 1-7; the excerpt is truncated partway through layer 8 (last complete record: layers.8.self_attn.k_proj.lora_A at 10:12:44,191) ...]
codec_lm.base_model.model.model.layers.8.self_attn.k_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,191 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.self_attn.v_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,191 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.self_attn.v_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,191 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.self_attn.o_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,191 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.self_attn.o_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.mlp.gate_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.mlp.gate_proj.lora_B.default.weight: torch.Size([2048, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.mlp.up_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.mlp.up_proj.lora_B.default.weight: torch.Size([2048, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.mlp.down_proj.lora_A.default.weight: torch.Size([64, 2048]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.8.mlp.down_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.self_attn.q_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.self_attn.q_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.self_attn.k_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.self_attn.k_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.self_attn.v_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.self_attn.v_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.self_attn.o_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.self_attn.o_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.mlp.gate_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.mlp.gate_proj.lora_B.default.weight: torch.Size([2048, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.mlp.up_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.mlp.up_proj.lora_B.default.weight: torch.Size([2048, 64]) 2025-04-24 10:12:44,192 INFO [train.py:813] codec_lm.base_model.model.model.layers.9.mlp.down_proj.lora_A.default.weight: torch.Size([64, 2048]) 2025-04-24 10:12:44,192 INFO [train.py:813] 
codec_lm.base_model.model.model.layers.9.mlp.down_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.self_attn.q_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.self_attn.q_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.self_attn.k_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.self_attn.k_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.self_attn.v_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.self_attn.v_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.self_attn.o_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.self_attn.o_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.mlp.gate_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.mlp.gate_proj.lora_B.default.weight: torch.Size([2048, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.mlp.up_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.mlp.up_proj.lora_B.default.weight: torch.Size([2048, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.mlp.down_proj.lora_A.default.weight: torch.Size([64, 2048]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.10.mlp.down_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.self_attn.q_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.self_attn.q_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.self_attn.k_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.self_attn.k_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,193 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.self_attn.v_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.self_attn.v_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.self_attn.o_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.self_attn.o_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.mlp.gate_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,194 INFO [train.py:813] 
codec_lm.base_model.model.model.layers.11.mlp.gate_proj.lora_B.default.weight: torch.Size([2048, 64]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.mlp.up_proj.lora_A.default.weight: torch.Size([64, 1024]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.mlp.up_proj.lora_B.default.weight: torch.Size([2048, 64]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.mlp.down_proj.lora_A.default.weight: torch.Size([64, 2048]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm.base_model.model.model.layers.11.mlp.down_proj.lora_B.default.weight: torch.Size([1024, 64]) 2025-04-24 10:12:44,194 INFO [train.py:813] speech_token_projector.weight: torch.Size([1024, 896]) 2025-04-24 10:12:44,194 INFO [train.py:813] speech_token_projector.bias: torch.Size([1024]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm_head.weight: torch.Size([4100, 1024]) 2025-04-24 10:12:44,194 INFO [train.py:813] codec_lm_head.bias: torch.Size([4100]) 2025-04-24 10:12:44,194 INFO [train.py:819] Device: cuda:0 2025-04-24 10:12:44,922 INFO [train.py:823] Using DeepSpeed 2025-04-24 10:12:46,690 INFO [data_module.py:445] About to get train cuts 2025-04-24 10:12:46,693 INFO [data_module.py:250] Disable MUSAN 2025-04-24 10:12:46,693 INFO [data_module.py:268] Enable SpecAugment 2025-04-24 10:12:46,693 INFO [data_module.py:269] Time warp factor: 80 2025-04-24 10:12:46,693 INFO [data_module.py:279] Num frame mask: 10 2025-04-24 10:12:46,693 INFO [data_module.py:292] About to create train dataset 2025-04-24 10:12:46,693 INFO [data_module.py:319] Using DynamicBucketingSampler. 2025-04-24 10:12:47,691 INFO [data_module.py:336] About to create train dataloader 2025-04-24 10:12:47,692 INFO [data_module.py:436] About to get test cuts 2025-04-24 10:12:47,693 INFO [data_module.py:365] About to create dev dataset 2025-04-24 10:12:47,781 INFO [data_module.py:379] About to create dev dataloader 2025-04-24 10:12:47,788 INFO [train.py:875] start training from epoch 1 2025-04-24 10:13:18,332 INFO [train.py:539] Computing validation loss
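A note on reading the shape dump above: every `lora_A`/`lora_B` pair follows the standard LoRA layout, `lora_A` of shape `[r, in_features]` and `lora_B` of shape `[out_features, r]`, with rank r = 64 throughout this run. The adapter adds `B(Ax)` on top of the frozen base projection, so `out_features` always tracks the wrapped layer: 1024 for the codec LM's attention projections, 2048 for its MLP gate/up projections, and back to 1024 for down_proj. A minimal sketch in plain PyTorch (not the PEFT wrapper that train.py actually uses) showing how these shapes compose:

```python
import torch
import torch.nn as nn


class LoRALinear(nn.Module):
    """Frozen base projection plus a rank-r update: y = W x + B (A x)."""

    def __init__(self, in_features: int, out_features: int, r: int = 64):
        super().__init__()
        self.base = nn.Linear(in_features, out_features, bias=False)
        self.base.weight.requires_grad = False  # base weights stay frozen
        # lora_A: [r, in_features], lora_B: [out_features, r] -- the logged layout.
        self.lora_A = nn.Parameter(torch.randn(r, in_features) * 0.01)
        self.lora_B = nn.Parameter(torch.zeros(out_features, r))  # zero-init: no-op at start

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.base(x) + x @ self.lora_A.T @ self.lora_B.T


# Reproduces the codec_lm mlp.gate_proj entries above:
# lora_A torch.Size([64, 1024]), lora_B torch.Size([2048, 64]).
gate_proj = LoRALinear(in_features=1024, out_features=2048, r=64)
print(gate_proj.lora_A.shape, gate_proj.lora_B.shape)
```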
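The four non-LoRA tensors at the end of the dump tie the two decoders together: judging by the shapes, `speech_token_projector` maps the 896-dim LLM hidden states (Qwen2.5-0.5B-Instruct) into the 1024-dim codec LM, and `codec_lm_head` scores 4100 codec tokens per step (presumably the speech codebook plus a few special tokens; the exact split is not logged). A sketch under those assumptions:

```python
import torch
import torch.nn as nn

LLM_DIM, CODEC_DIM, CODEC_VOCAB = 896, 1024, 4100  # taken from the logged shapes

# speech_token_projector.weight: torch.Size([1024, 896]), .bias: torch.Size([1024])
speech_token_projector = nn.Linear(LLM_DIM, CODEC_DIM)
# codec_lm_head.weight: torch.Size([4100, 1024]), .bias: torch.Size([4100])
codec_lm_head = nn.Linear(CODEC_DIM, CODEC_VOCAB)

hidden = torch.randn(2, 17, LLM_DIM)  # (batch, time, dim) hidden states from the LLM
logits = codec_lm_head(speech_token_projector(hidden))
print(logits.shape)  # torch.Size([2, 17, 4100])
```

Note that `nn.Linear(in, out)` stores its weight as `[out, in]`, which is why the logged projector weight is `[1024, 896]` rather than `[896, 1024]`.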
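The data_module.py lines correspond to standard lhotse components: MUSAN noise mixing is disabled, SpecAugment is enabled with time-warp factor 80 and 10 frame masks, and training batches are drawn by a `DynamicBucketingSampler` (max_duration 40 s and 30 buckets per this run's config). A rough reconstruction of the sampling setup; the argument names follow lhotse's public API, but the real data_module.py wires these into a full dataset/dataloader and likely passes additional options:

```python
from lhotse import CutSet
from lhotse.dataset import DynamicBucketingSampler, SpecAugment


def build_train_sampling(cuts: CutSet):
    # SpecAugment settings echoed in the log ("Time warp factor: 80", "Num frame mask: 10").
    spec_aug = SpecAugment(time_warp_factor=80, num_frame_masks=10)
    # Duration-bucketed batches: each batch totals at most 40 s of audio.
    sampler = DynamicBucketingSampler(
        cuts,
        max_duration=40,
        num_buckets=30,
        shuffle=True,
        drop_last=True,
    )
    return spec_aug, sampler
```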