2025-04-24 14:23:07,824 INFO [train.py:653] {
  "allowed_excess_duration_ratio": 0.1,
  "audio_key": "question_audio",
  "batch_idx_train": 0,
  "best_train_epoch": -1,
  "best_train_loss": Infinity,
  "best_valid_epoch": -1,
  "best_valid_loss": Infinity,
  "bucketing_sampler": true,
  "deepscale": false,
  "deepscale_config": null,
  "deepspeed": true,
  "deepspeed_config": "./slam_omni/ds_config_zero1.json",
  "drop_last": true,
  "enable_musan": false,
  "enable_spec_aug": true,
  "enable_speech_output": true,
  "encoder_projector_ds_rate": 8,
  "env_info": {
    "IP address": "0.114.183.253",
    "hostname": "7518205",
    "icefall-git-branch": null,
    "icefall-git-date": null,
    "icefall-git-sha1": null,
    "icefall-path": "/workspace/slam/icefall_omni",
    "k2-build-type": "Release",
    "k2-git-date": "Tue Oct 29 09:02:19 2024",
    "k2-git-sha1": "75e2ed6b2fd87c22b7f3f34bad48a69984bb8755",
    "k2-path": "/opt/conda/lib/python3.11/site-packages/k2/__init__.py",
    "k2-version": "1.24.4",
    "k2-with-cuda": true,
    "lhotse-path": "/workspace/slam/lhotse/lhotse/__init__.py",
    "lhotse-version": "1.30.0.dev+git.13c7616f.dirty",
    "python-version": "3.11",
    "torch-cuda-available": true,
    "torch-cuda-version": "12.4",
    "torch-version": "2.4.0"
  },
  "exp_dir": "slam_omni/exp_speech2speech_rerun",
  "frame_shift_ms": 10,
  "huggingface_dataset_path_or_name": "/workspace/Belle_1.4M-SLAM-Omni",
  "input_strategy": "PrecomputedFeatures",
  "llm_path_or_name": "models/Qwen2.5-0.5B-Instruct",
  "log_interval": 50,
  "manifest_dir": "data/fbank",
  "max_duration": 50,
  "num_buckets": 30,
  "num_epochs": 10,
  "num_workers": 2,
  "on_the_fly_feats": false,
  "pretrained_model_path": "./slam_omni/exp_speech2speech_rerun/epoch-1-checkpoint-15000.pt/pytorch_model.bin",
  "resample_to_16kHz": true,
  "reset_interval": 200,
  "return_cuts": true,
  "sampler_state_dict_path": "./slam_omni/exp_speech2speech_rerun/epoch-1-checkpoint-15000-sampler.pt",
  "seed": 42,
  "shuffle": true,
  "spec_aug_time_warp_factor": 80,
  "speech_encoder_path_or_name": "models/whisper/v1.1/whisper-large-v2-multi-hans-zh-epoch-3-avg-10.pt",
  "start_epoch": 1,
  "subsampling_factor": 2,
  "tensorboard": true,
  "text_key": "answer",
  "unfreeze_llm": true,
  "unfreeze_speech_projector": true,
  "use_flash_attn": true,
  "use_fp16": true,
  "use_lora": true,
  "valid_interval": 5000
}
2025-04-24 14:23:07,824 INFO [train.py:655] About to create model
2025-04-24 14:23:41,026 INFO [train.py:808] Number of model parameters: 1314396804
2025-04-24 14:23:41,026 INFO [train.py:810] Trainable parameters (excluding model.eval modules):
2025-04-24 14:23:41,027 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.q_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 14:23:41,027 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.q_proj.lora_B.default.weight: torch.Size([896, 64])
2025-04-24 14:23:41,027 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.k_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 14:23:41,027 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.k_proj.lora_B.default.weight: torch.Size([128, 64])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.v_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.v_proj.lora_B.default.weight: torch.Size([128, 64])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.o_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.self_attn.o_proj.lora_B.default.weight: torch.Size([896, 64])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.gate_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.gate_proj.lora_B.default.weight: torch.Size([4864, 64])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.up_proj.lora_A.default.weight: torch.Size([64, 896])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.up_proj.lora_B.default.weight: torch.Size([4864, 64])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.down_proj.lora_A.default.weight: torch.Size([64, 4864])
2025-04-24 14:23:41,028 INFO [train.py:813] llm.base_model.model.model.layers.0.mlp.down_proj.lora_B.default.weight: torch.Size([896, 64])
[... log entries for llm layers 1-23 omitted: each layer logs the same fourteen LoRA shapes shown above for layer 0 ...]
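The config above hands DeepSpeed the file ./slam_omni/ds_config_zero1.json, whose contents are not captured in this log. As a rough sketch only, a ZeRO stage-1 config consistent with the logged flags (deepspeed: true, use_fp16: true) could look like the dict below; every value is an assumption, not the repository's actual file.

```python
# Hypothetical ZeRO stage-1 DeepSpeed config; values are assumptions
# inferred from the flags above, not the actual ds_config_zero1.json.
ds_config = {
    "train_micro_batch_size_per_gpu": 1,  # assumption: not in the log
    "gradient_accumulation_steps": 1,     # assumption: not in the log
    "fp16": {"enabled": True},            # matches "use_fp16": true
    "zero_optimization": {"stage": 1},    # "zero1" in the file name
    "gradient_clipping": 1.0,             # assumption: not in the log
}
# The engine would then be built along the lines of:
# engine, optimizer, _, _ = deepspeed.initialize(model=model, config=ds_config, ...)
```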
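The lora_A shapes of [64, 896] imply LoRA rank r = 64 applied to every attention and MLP projection of the 896-dim Qwen2.5-0.5B LLM, and the parameter names (llm.base_model.model..., lora_A.default) match PEFT's naming. A minimal sketch of a matching setup, assuming PEFT is the adapter library; lora_alpha and lora_dropout are not recoverable from the log and are placeholders.

```python
# Sketch (not the repo's actual code) of a LoRA config reproducing the
# logged adapter shapes on Qwen2.5-0.5B-Instruct.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

llm = AutoModelForCausalLM.from_pretrained("models/Qwen2.5-0.5B-Instruct")
lora_cfg = LoraConfig(
    r=64,                 # matches lora_A.default.weight: [64, 896]
    lora_alpha=16,        # assumption: value not in the log
    lora_dropout=0.05,    # assumption: value not in the log
    target_modules=[
        "q_proj", "k_proj", "v_proj", "o_proj",  # attention projections
        "gate_proj", "up_proj", "down_proj",     # MLP projections
    ],
    task_type="CAUSAL_LM",
)
llm = get_peft_model(llm, lora_cfg)  # yields llm.base_model.model... names
```

Note that lora_B is [896, 64] for q_proj/o_proj but [128, 64] for k_proj/v_proj: Qwen2.5-0.5B uses grouped-query attention, so the key/value projections output only 128 dims.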
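A quick arithmetic check of what the condensed listing implies, using only the shapes printed above: each of the 24 layers carries the same fourteen LoRA matrices, totaling about 35.2M trainable LoRA weights, a small slice of the 1,314,396,804 total parameters reported earlier.

```python
# Assumption-free count of the LoRA parameters in the listing above.
r, d_model, d_kv, d_ffn, n_layers = 64, 896, 128, 4864, 24

per_layer = (
    2 * (r * d_model + d_model * r)  # q_proj, o_proj: A [64,896] + B [896,64]
    + 2 * (r * d_model + d_kv * r)   # k_proj, v_proj: A [64,896] + B [128,64]
    + 2 * (r * d_model + d_ffn * r)  # gate_proj, up_proj: A [64,896] + B [4864,64]
    + (r * d_ffn + d_model * r)      # down_proj: A [64,4864] + B [896,64]
)
print(per_layer)             # 1466368 per layer
print(per_layer * n_layers)  # 35192832 across all 24 layers
```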
2025-04-24 14:23:41,044 INFO [train.py:813] encoder_projector.linear1.weight: torch.Size([896, 10240])
2025-04-24 14:23:41,044 INFO [train.py:813] encoder_projector.linear1.bias: torch.Size([896])
2025-04-24 14:23:41,044 INFO [train.py:813] encoder_projector.linear2.weight: torch.Size([896, 896])
2025-04-24 14:23:41,044 INFO [train.py:813] encoder_projector.linear2.bias: torch.Size([896])
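The linear1 input width of 10240 matches encoder_projector_ds_rate = 8 times Whisper large-v2's 1280-dim encoder output, suggesting the projector stacks 8 consecutive encoder frames before projecting into the 896-dim LLM space. A minimal sketch under that assumption; only the linear1/linear2 names and shapes come from the log, and the activation between them is a guess.

```python
# Sketch of an encoder projector consistent with the logged shapes.
import torch
import torch.nn as nn

class EncoderProjector(nn.Module):
    def __init__(self, encoder_dim=1280, llm_dim=896, ds_rate=8):
        super().__init__()
        self.ds_rate = ds_rate
        self.linear1 = nn.Linear(encoder_dim * ds_rate, llm_dim)  # weight [896, 10240]
        self.linear2 = nn.Linear(llm_dim, llm_dim)                # weight [896, 896]

    def forward(self, x):  # x: (batch, frames, encoder_dim)
        b, t, d = x.shape
        t = t - (t % self.ds_rate)  # drop the ragged tail of frames
        x = x[:, :t, :].reshape(b, t // self.ds_rate, d * self.ds_rate)
        return self.linear2(torch.relu(self.linear1(x)))  # activation: assumption
```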
14:23:41,045 INFO [train.py:813] codec_lm.model.layers.1.input_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.1.post_attention_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.self_attn.q_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.self_attn.q_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.self_attn.k_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.self_attn.k_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.self_attn.v_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.self_attn.v_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.self_attn.o_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.mlp.gate_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.mlp.up_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.mlp.down_proj.weight: torch.Size([1024, 2048]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.input_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.2.post_attention_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.3.self_attn.q_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.3.self_attn.q_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,045 INFO [train.py:813] codec_lm.model.layers.3.self_attn.k_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.self_attn.k_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.self_attn.v_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.self_attn.v_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.self_attn.o_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.mlp.gate_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.mlp.up_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.mlp.down_proj.weight: torch.Size([1024, 2048]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.input_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.3.post_attention_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.self_attn.q_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.self_attn.q_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.self_attn.k_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.self_attn.k_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.self_attn.v_proj.weight: torch.Size([1024, 1024]) 
2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.self_attn.v_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.self_attn.o_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.mlp.gate_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.mlp.up_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.mlp.down_proj.weight: torch.Size([1024, 2048]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.input_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.4.post_attention_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.5.self_attn.q_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.5.self_attn.q_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,046 INFO [train.py:813] codec_lm.model.layers.5.self_attn.k_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.self_attn.k_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.self_attn.v_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.self_attn.v_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.self_attn.o_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.mlp.gate_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.mlp.up_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.mlp.down_proj.weight: torch.Size([1024, 2048]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.input_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.5.post_attention_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.self_attn.q_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.self_attn.q_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.self_attn.k_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.self_attn.k_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.self_attn.v_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.self_attn.v_proj.bias: torch.Size([1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.self_attn.o_proj.weight: torch.Size([1024, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.mlp.gate_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.mlp.up_proj.weight: torch.Size([2048, 1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.mlp.down_proj.weight: torch.Size([1024, 2048]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.input_layernorm.weight: torch.Size([1024]) 2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.6.post_attention_layernorm.weight: torch.Size([1024]) 
2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.7.self_attn.q_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.7.self_attn.q_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.7.self_attn.k_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,047 INFO [train.py:813] codec_lm.model.layers.7.self_attn.k_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.7.self_attn.v_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.7.self_attn.v_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.7.self_attn.o_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.7.mlp.gate_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.7.mlp.up_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.7.mlp.down_proj.weight: torch.Size([1024, 2048])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.7.input_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.7.post_attention_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.self_attn.q_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.self_attn.q_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.self_attn.k_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.self_attn.k_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.self_attn.v_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.self_attn.v_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.self_attn.o_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.mlp.gate_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.mlp.up_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.mlp.down_proj.weight: torch.Size([1024, 2048])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.input_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.8.post_attention_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.9.self_attn.q_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.9.self_attn.q_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.9.self_attn.k_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,048 INFO [train.py:813] codec_lm.model.layers.9.self_attn.k_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.9.self_attn.v_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.9.self_attn.v_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.9.self_attn.o_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.9.mlp.gate_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.9.mlp.up_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.9.mlp.down_proj.weight: torch.Size([1024, 2048])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.9.input_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.9.post_attention_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.self_attn.q_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.self_attn.q_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.self_attn.k_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.self_attn.k_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.self_attn.v_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.self_attn.v_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.self_attn.o_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.mlp.gate_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.mlp.up_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.mlp.down_proj.weight: torch.Size([1024, 2048])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.input_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.10.post_attention_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.11.self_attn.q_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.11.self_attn.q_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.11.self_attn.k_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.11.self_attn.k_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.11.self_attn.v_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,049 INFO [train.py:813] codec_lm.model.layers.11.self_attn.v_proj.bias: torch.Size([1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm.model.layers.11.self_attn.o_proj.weight: torch.Size([1024, 1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm.model.layers.11.mlp.gate_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm.model.layers.11.mlp.up_proj.weight: torch.Size([2048, 1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm.model.layers.11.mlp.down_proj.weight: torch.Size([1024, 2048])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm.model.layers.11.input_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm.model.layers.11.post_attention_layernorm.weight: torch.Size([1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm.model.norm.weight: torch.Size([1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm.lm_head.weight: torch.Size([4100, 1024])
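The codec_lm shape dump above (layers 0 through 11) pins down most of a Qwen2-style decoder configuration: hidden size 1024, MLP intermediate size 2048, 12 layers, biased q/k/v projections, full-width k/v projections (so no grouped-query attention), and a 4100-entry output vocabulary. A minimal sketch of a matching model follows, assuming the codec LM is instantiated as a Hugging Face Qwen2 causal LM; the attention head count leaves no trace in these shapes and is a guess here.

# Sketch only: a config consistent with the logged codec_lm parameter shapes.
# Qwen2ForCausalLM and num_attention_heads=16 are assumptions, not confirmed
# by this log.
from transformers import Qwen2Config, Qwen2ForCausalLM

codec_lm_config = Qwen2Config(
    hidden_size=1024,        # q/k/v/o_proj weights are [1024, 1024]
    intermediate_size=2048,  # gate_proj/up_proj weights are [2048, 1024]
    num_hidden_layers=12,    # layers.0 ... layers.11 appear in the dump
    num_attention_heads=16,  # assumption: not recoverable from the shapes
    num_key_value_heads=16,  # k/v_proj are full [1024, 1024], i.e. no GQA
    vocab_size=4100,         # lm_head.weight is [4100, 1024]
    tie_word_embeddings=False,
)
codec_lm = Qwen2ForCausalLM(codec_lm_config)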
2025-04-24 14:23:41,050 INFO [train.py:813] speech_token_projector.weight: torch.Size([1024, 1792])
2025-04-24 14:23:41,050 INFO [train.py:813] speech_token_projector.bias: torch.Size([1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm_head.weight: torch.Size([4100, 1024])
2025-04-24 14:23:41,050 INFO [train.py:813] codec_lm_head.bias: torch.Size([4100])
2025-04-24 14:23:41,050 INFO [train.py:819] Device: cuda:3
2025-04-24 14:23:42,316 INFO [train.py:823] Using DeepSpeed
2025-04-24 14:23:45,911 INFO [data_module.py:445] About to get train cuts
2025-04-24 14:23:45,914 INFO [data_module.py:250] Disable MUSAN
2025-04-24 14:23:45,914 INFO [data_module.py:268] Enable SpecAugment
2025-04-24 14:23:45,914 INFO [data_module.py:269] Time warp factor: 80
2025-04-24 14:23:45,914 INFO [data_module.py:279] Num frame mask: 10
2025-04-24 14:23:45,914 INFO [data_module.py:292] About to create train dataset
2025-04-24 14:23:45,914 INFO [data_module.py:319] Using DynamicBucketingSampler.
2025-04-24 14:23:46,908 INFO [data_module.py:336] About to create train dataloader
2025-04-24 14:23:46,908 INFO [data_module.py:339] Loading sampler state dict
2025-04-24 14:31:01,125 INFO [data_module.py:436] About to get test cuts
2025-04-24 14:31:01,126 INFO [data_module.py:365] About to create dev dataset
2025-04-24 14:31:01,226 INFO [data_module.py:379] About to create dev dataloader
2025-04-24 14:31:01,226 INFO [train.py:875] start training from epoch 1
2025-04-24 14:31:07,576 INFO [train.py:539] Computing validation loss
2025-04-24 14:31:25,994 INFO [train.py:548] Epoch 1, validation: loss=0.001835, acc=0.5741, codec_acc=0.4185, codec_topk_acc=0.8973, codec_loss=0.0009616, text_loss=0.0008736, over 285507.00 frames.
2025-04-24 14:31:25,994 INFO [train.py:549] Maximum memory allocated so far is 12415MB
2025-04-24 14:31:26,617 INFO [train.py:611] Epoch 1, batch 0, loss[loss=0.001616, acc=0.5799, codec_acc=0.4322, codec_topk_acc=0.9044, codec_loss=0.0008327, text_loss=0.0007834, over 2387.00 frames. ], tot_loss[loss=0.001616, acc=0.5799, codec_acc=0.4322, codec_topk_acc=0.9044, codec_loss=0.0008327, text_loss=0.0007834, over 2387.00 frames. ], batch size: 8, lr: 0.00e+00,
2025-04-24 14:31:51,914 INFO [train.py:611] Epoch 1, batch 50, loss[loss=0.001639, acc=0.4779, codec_acc=0.412, codec_topk_acc=0.9095, codec_loss=0.0008087, text_loss=0.0008306, over 2437.00 frames. ], tot_loss[loss=0.001746, acc=0.5572, codec_acc=0.4215, codec_topk_acc=0.8995, codec_loss=0.0009017, text_loss=0.0008447, over 100155.57 frames. ], batch size: 6, lr: 8.49e-05,
2025-04-24 14:32:17,111 INFO [train.py:611] Epoch 1, batch 100, loss[loss=0.002011, acc=0.531, codec_acc=0.4115, codec_topk_acc=0.9053, codec_loss=0.001043, text_loss=0.000968, over 1925.00 frames. ], tot_loss[loss=0.001774, acc=0.5582, codec_acc=0.422, codec_topk_acc=0.899, codec_loss=0.0009154, text_loss=0.0008591, over 173717.13 frames. ], batch size: 17, lr: 9.98e-05,
2025-04-24 14:32:43,834 INFO [train.py:611] Epoch 1, batch 150, loss[loss=0.00176, acc=0.5114, codec_acc=0.4241, codec_topk_acc=0.9043, codec_loss=0.0008678, text_loss=0.0008921, over 2278.00 frames. ], tot_loss[loss=0.001774, acc=0.5547, codec_acc=0.4215, codec_topk_acc=0.8983, codec_loss=0.0009083, text_loss=0.0008661, over 234221.34 frames. ], batch size: 11, lr: 1.00e-04,
2025-04-24 14:33:08,347 INFO [train.py:611] Epoch 1, batch 200, loss[loss=0.001876, acc=0.4801, codec_acc=0.434, codec_topk_acc=0.9026, codec_loss=0.0008418, text_loss=0.001034, over 2294.00 frames. ], tot_loss[loss=0.001765, acc=0.5519, codec_acc=0.4221, codec_topk_acc=0.8998, codec_loss=0.0009019, text_loss=0.0008634, over 281067.50 frames. ], batch size: 11, lr: 1.00e-04,
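Before the first batch, the run also logs two small trainable bridges: speech_token_projector (weight [1024, 1792] plus bias) and codec_lm_head (weight [4100, 1024] plus bias). 1792 is exactly twice the 896-dimensional hidden size of the Qwen2.5-0.5B LLM used here, which suggests, as an assumption not stated in the log, that adjacent pairs of LLM hidden states are concatenated before being projected into the 1024-dimensional codec LM and eventually scored against the 4100 codec tokens. A sketch under that reading:

# Sketch only: module shapes match the log; the GROUP=2 frame pairing is an
# assumption inferred from 1792 = 2 * 896, not confirmed by this log.
import torch
from torch import nn

LLM_DIM = 896      # Qwen2.5-0.5B hidden size
CODEC_DIM = 1024   # codec LM hidden size, from the shapes above
GROUP = 2          # assumed number of LLM frames concatenated per codec step

speech_token_projector = nn.Linear(GROUP * LLM_DIM, CODEC_DIM)  # weight [1024, 1792], bias [1024]
codec_lm_head = nn.Linear(CODEC_DIM, 4100)                      # weight [4100, 1024], bias [4100]

h = torch.randn(4, 10, LLM_DIM)                 # (batch, T, 896) LLM hidden states
h = h.reshape(4, 10 // GROUP, GROUP * LLM_DIM)  # concatenate adjacent pairs
proj = speech_token_projector(h)                # (batch, T/GROUP, 1024), input to the codec LM
# ... the 12-layer codec LM (see the config sketch above) runs in between ...
logits = codec_lm_head(torch.randn(4, 10 // GROUP, CODEC_DIM))  # (batch, T/GROUP, 4100)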
2025-04-24 14:33:33,999 INFO [train.py:611] Epoch 1, batch 250, loss[loss=0.001751, acc=0.5563, codec_acc=0.4348, codec_topk_acc=0.9093, codec_loss=0.0008622, text_loss=0.0008889, over 2241.00 frames. ], tot_loss[loss=0.001761, acc=0.5576, codec_acc=0.4222, codec_topk_acc=0.8979, codec_loss=0.0009073, text_loss=0.0008539, over 316253.29 frames. ], batch size: 14, lr: 1.00e-04,
2025-04-24 14:34:00,442 INFO [train.py:611] Epoch 1, batch 300, loss[loss=0.001969, acc=0.459, codec_acc=0.4444, codec_topk_acc=0.9126, codec_loss=0.0008568, text_loss=0.001113, over 2217.00 frames. ], tot_loss[loss=0.00176, acc=0.5517,
2025-04-24 14:34:25,390 INFO [train.py:611] Epoch 1, batch 350, loss[loss=0.001927, acc=0.5133, codec_acc=0.4262, codec_topk_acc=0.9031, codec_loss=0.0008854, text_loss=0.001042, over 2225.00 frames. ], tot_loss[loss=0.001757, acc=0.5516, codec_acc=0.4225, codec_topk_acc=0.8988, codec_loss=0.0008953, text_loss=0.0008613, over 369308.01 frames. ], batch size: 9, lr: 1.00e-04,
2025-04-24 14:34:50,212 INFO [train.py:611] Epoch 1, batch 400, loss[loss=0.001679, acc=0.5135, codec_acc=0.4239, codec_topk_acc=0.9034, codec_loss=0.0008574, text_loss=0.0008218, over 2296.00 frames. ], tot_loss[loss=0.001741, acc=0.5618,
2025-04-24 14:35:16,996 INFO [train.py:611] Epoch 1, batch 450, loss[loss=0.001793, acc=0.5455, codec_acc=0.4325, codec_topk_acc=0.8996, codec_loss=0.0009004, text_loss=0.000893, over 2220.00 frames. ], tot_loss[loss=0.001758, acc=0.5548, codec_acc=0.4237, codec_topk_acc=0.8995, codec_loss=0.0008976, text_loss=0.0008606, over 397962.99 frames. ], batch size: 6, lr: 1.00e-04,
2025-04-24 14:35:42,865 INFO [train.py:611] Epoch 1, batch 500, loss[loss=0.001877, acc=0.525, codec_acc=0.4363, codec_topk_acc=0.9104, codec_loss=0.0009391, text_loss=0.0009382, over 2088.00 frames. ], tot_loss[loss=0.001753, acc=0.5569,
2025-04-24 14:36:09,827 INFO [train.py:611] Epoch 1, batch 550, loss[loss=0.001548, acc=0.5809, codec_acc=0.4371, codec_topk_acc=0.9169, codec_loss=0.0008068, text_loss=0.0007409, over 2406.00 frames. ], tot_loss[loss=0.001769, acc=0.5498, codec_acc=0.4235, codec_topk_acc=0.8999, codec_loss=0.0008972, text_loss=0.0008715, over 416336.15 frames. ], batch size: 7, lr: 1.00e-04,
2025-04-24 14:36:41,050 INFO [train.py:611] Epoch 1, batch 600, loss[loss=0.001444, acc=0.6394, codec_acc=0.4245, codec_topk_acc=0.9009, codec_loss=0.0008405, text_loss=0.0006037, over 2372.00 frames. ], tot_loss[loss=0.001739, acc=0.5592, codec_acc=0.4236, codec_topk_acc=0.8999, codec_loss=0.0008947, text_loss=0.0008444, over 423320.88 frames. ], batch size: 8, lr: 1.00e-04,
2025-04-24 14:37:06,363 INFO [train.py:611] Epoch 1, batch 650, loss[loss=0.001818, acc=0.5301, codec_acc=0.4364, codec_topk_acc=0.9085, codec_loss=0.0008659, text_loss=0.0009517, over 2212.00 frames. ], tot_loss[loss=0.001738, acc=0.5611,
2025-04-24 14:37:41,872 INFO [train.py:611] Epoch 1, batch 700, loss[loss=0.001701, acc=0.6455, codec_acc=0.4154, codec_topk_acc=0.8749, codec_loss=0.0009792, text_loss=0.000722, over 2135.00 frames. ], tot_loss[loss=0.001744, acc=0.5586, codec_acc=0.4241, codec_topk_acc=0.8996, codec_loss=0.0008944, text_loss=0.0008491, over 432121.01 frames. ], batch size: 4, lr: 1.00e-04,
2025-04-24 14:38:11,200 INFO [train.py:611] Epoch 1, batch 750, loss[loss=0.001786, acc=0.5024, codec_acc=0.4284, codec_topk_acc=0.9, codec_loss=0.0008294, text_loss=0.000957, over 2395.00 frames. ], tot_loss[loss=0.001758, acc=0.5516,
2025-04-24 14:38:46,617 INFO [train.py:611] Epoch 1, batch 800, loss[loss=0.001807, acc=0.5472, codec_acc=0.4125, codec_topk_acc=0.8996, codec_loss=0.0009111, text_loss=0.0008957, over 2211.00 frames. ], tot_loss[loss=0.001741, acc=0.5544, codec_acc=0.4258,
2025-04-24 14:39:19,408 INFO [train.py:611] Epoch 1, batch 850, loss[loss=0.001805, acc=0.5102, codec_acc=0.4306, codec_topk_acc=0.9032, codec_loss=0.0008161, text_loss=0.000989, over 2395.00 frames. ], tot_loss[loss=0.001745, acc=0.5584, codec_acc=0.4247, codec_topk_acc=0.9013, codec_loss=0.0008927, text_loss=0.0008519, over 438277.08 frames. ], batch size: 9, lr: 1.00e-04,
2025-04-24 14:39:44,110 INFO [train.py:611] Epoch 1, batch 900, loss[loss=0.001729, acc=0.489, codec_acc=0.4175, codec_topk_acc=0.9032, codec_loss=0.0008335, text_loss=0.0008955, over 2426.00 frames. ], tot_loss[loss=0.001733, acc=0.5601, codec_acc=0.4253, codec_topk_acc=0.9011, codec_loss=0.0008927, text_loss=0.0008401, over 439309.24 frames. ], batch size: 7, lr: 1.00e-04,
2025-04-24 14:40:15,878 INFO [train.py:611] Epoch 1, batch 950, loss[loss=0.00191, acc=0.4775, codec_acc=0.4241, codec_topk_acc=0.9023, codec_loss=0.0008785, text_loss=0.001032, over 2237.00 frames. ], tot_loss[loss=0.001744, acc=0.5615, codec_acc=0.424, codec_topk_acc=0.8996, codec_loss=0.0008944, text_loss=0.0008494, over 441632.68 frames. ], batch size: 14, lr: 1.00e-04,
2025-04-24 14:40:41,684 INFO [train.py:611] Epoch 1, batch 1000, loss[loss=0.00182, acc=0.7014, codec_acc=0.4285, codec_topk_acc=0.8914, codec_loss=0.001086, text_loss=0.0007335, over 1866.00 frames. ], tot_loss[loss=0.001739, acc=0.5627,
2025-04-24 14:41:07,678 INFO [train.py:611] Epoch 1, batch 1050, loss[loss=0.001676, acc=0.5498, codec_acc=0.4603, codec_topk_acc=0.9135, codec_loss=0.000815, text_loss=0.0008606, over 2283.00 frames. ], tot_loss[loss=0.00176, acc=0.5554,
2025-04-24 14:41:39,629 INFO [train.py:611] Epoch 1, batch 1100, loss[loss=0.001559, acc=0.6326, codec_acc=0.4346, codec_topk_acc=0.899, codec_loss=0.0008957, text_loss=0.0006634, over 2199.00 frames. ], tot_loss[loss=0.001732, acc=0.5644, codec_acc=0.4256, codec_topk_acc=0.9014, codec_loss=0.0008935, text_loss=0.0008387, over 441902.17 frames. ], batch size: 6, lr: 1.00e-04,
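Each tot_loss[...] block above is not an epoch average: every field, including the "over N frames." normalizer, behaves like a decayed running sum, which is why the frame count climbs from 2,387 at batch 0 toward a plateau around 440,000 (roughly 200 batches times the typical ~2,200 frames per batch) and then hovers there. A minimal sketch of that bookkeeping, with the window length of 200 inferred from the plateau rather than taken from any code:

# Hedged sketch of the running statistics behind "tot_loss[...] over N frames.":
# decayed sums with an effective window of ~200 batches (inferred, not quoted).
WINDOW = 200

def update_tot(tot: dict, batch_stats: dict) -> dict:
    """Decay all accumulated sums, then add this batch's sums."""
    decay = 1.0 - 1.0 / WINDOW
    keys = set(tot) | set(batch_stats)
    return {k: decay * tot.get(k, 0.0) + batch_stats.get(k, 0.0) for k in keys}

# Printed values are ratios of these sums, e.g. the loss= field inside
# tot_loss is tot["loss"] / tot["frames"], and "over N frames." is tot["frames"].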
2025-04-24 14:42:04,273 INFO [train.py:611] Epoch 1, batch 1150, loss[loss=0.002014, acc=0.5245, codec_acc=0.4153, codec_topk_acc=0.8944, codec_loss=0.0009954, text_loss=0.001019, over 2018.00 frames. ], tot_loss[loss=0.001728, acc=0.5657, codec_acc=0.426, codec_topk_acc=0.9021, codec_loss=0.0008926, text_loss=0.0008359, over 441834.53 frames. ], batch size: 4, lr: 1.00e-04,
2025-04-24 14:42:30,505 INFO [train.py:611] Epoch 1, batch 1200, loss[loss=0.002109, acc=0.5315, codec_acc=0.4307, codec_topk_acc=0.895, codec_loss=0.0009749, text_loss=0.001134, over 2000.00 frames. ], tot_loss[loss=0.001767, acc=0.5567,
2025-04-24 14:42:56,631 INFO [train.py:611] Epoch 1, batch 1250, loss[loss=0.001955, acc=0.4862, codec_acc=0.4247, codec_topk_acc=0.9022, codec_loss=0.0008905, text_loss=0.001065, over 2217.00 frames. ], tot_loss[loss=0.001733, acc=0.5591, codec_acc=0.4264, codec_topk_acc=0.9024, codec_loss=0.0008873, text_loss=0.000846, over 444371.91 frames. ], batch size: 10, lr: 1.00e-04,
2025-04-24 14:43:25,702 INFO [train.py:611] Epoch 1, batch 1300, loss[loss=0.001575, acc=0.5879, codec_acc=0.4264, codec_topk_acc=0.9065, codec_loss=0.0008338, text_loss=0.0007416, over 2325.00 frames. ], tot_loss[loss=0.001737, acc=0.5579, codec_acc=0.4269, codec_topk_acc=0.9029, codec_loss=0.0008871, text_loss=0.0008498, over 443884.08 frames. ], batch size: 6, lr: 1.00e-04,
2025-04-24 14:44:12,430 INFO [train.py:611] Epoch 1, batch 1350, loss[loss=0.001485, acc=0.6005, codec_acc=0.4329, codec_topk_acc=0.912, codec_loss=0.0008064, text_loss=0.0006787, over 2381.00 frames. ], tot_loss[loss=0.00175, acc=0.555, codec_acc=0.4277, codec_topk_acc=0.9027, codec_loss=0.0008897, text_loss=0.0008606, over 442388.63 frames. ], batch size: 8, lr: 1.00e-04,
2025-04-24 14:44:46,045 INFO [train.py:611] Epoch 1, batch 1400, loss[loss=0.001764, acc=0.5373, codec_acc=0.423, codec_topk_acc=0.8996, codec_loss=0.0008755, text_loss=0.000889, over 2283.00 frames. ], tot_loss[loss=0.001749, acc=0.5533,
2025-04-24 14:45:29,494 INFO [train.py:611] Epoch 1, batch 1450, loss[loss=0.00149, acc=0.5627, codec_acc=0.4292, codec_topk_acc=0.906, codec_loss=0.000797, text_loss=0.0006934, over 2421.00 frames. ], tot_loss[loss=0.001739, acc=0.5565, codec_acc=0.4283, codec_topk_acc=0.9042, codec_loss=0.0008863, text_loss=0.0008529, over 442707.20 frames. ], batch size: 7, lr: 1.00e-04,
2025-04-24 14:46:00,721 INFO [train.py:611] Epoch 1, batch 1500, loss[loss=0.001908, acc=0.4532, codec_acc=0.4213, codec_topk_acc=0.9094, codec_loss=0.0008173, text_loss=0.001091, over 2394.00 frames. ], tot_loss[loss=0.001749, acc=0.5544,
2025-04-24 14:46:30,531 INFO [train.py:611] Epoch 1, batch 1550, loss[loss=0.0017, acc=0.556, codec_acc=0.4317, codec_topk_acc=0.8985, codec_loss=0.0008494, text_loss=0.0008504, over 2308.00 frames. ], tot_loss[loss=0.001731, acc=0.5627,
2025-04-24 14:46:56,920 INFO [train.py:611] Epoch 1, batch 1600, loss[loss=0.001689, acc=0.4935, codec_acc=0.4338, codec_topk_acc=0.9136, codec_loss=0.0008117, text_loss=0.0008773, over 2353.00 frames. ], tot_loss[loss=0.001736, acc=0.5564, codec_acc=0.4302, codec_topk_acc=0.9053, codec_loss=0.0008799, text_loss=0.0008565, over 444167.73 frames. ], batch size: 10, lr: 1.00e-04,
2025-04-24 14:47:21,990 INFO [train.py:611] Epoch 1, batch 1650, loss[loss=0.001781, acc=0.4828, codec_acc=0.4289, codec_topk_acc=0.9073, codec_loss=0.0008517, text_loss=0.0009291, over 2299.00 frames. ], tot_loss[loss=0.001735, acc=0.5591, codec_acc=0.4299, codec_topk_acc=0.9054, codec_loss=0.0008816, text_loss=0.0008533, over 443264.11 frames. ], batch size: 9, lr: 1.00e-04,
2025-04-24 14:47:47,023 INFO [train.py:611] Epoch 1, batch 1700, loss[loss=0.002372, acc=0.4865, codec_acc=0.414, codec_topk_acc=0.8896, codec_loss=0.001008, text_loss=0.001364, over 2005.00 frames. ], tot_loss[loss=0.001731, acc=0.5579, codec_acc=0.4303, codec_topk_acc=0.9057, codec_loss=0.0008805, text_loss=0.0008508, over 443321.27 frames. ], batch size: 4, lr: 1.00e-04,
2025-04-24 14:48:13,189 INFO [train.py:611] Epoch 1, batch 1750, loss[loss=0.001747, acc=0.501, codec_acc=0.4313, codec_topk_acc=0.9004, codec_loss=0.0008267, text_loss=0.0009206, over 2386.00 frames. ], tot_loss[loss=0.001736, acc=0.5594,
2025-04-24 14:48:42,987 INFO [train.py:611] Epoch 1, batch 1800, loss[loss=0.001709, acc=0.6368, codec_acc=0.4325, codec_topk_acc=0.9051, codec_loss=0.001066, text_loss=0.0006428, over 1830.00 frames. ], tot_loss[loss=0.001728, acc=0.5622,
2025-04-24 14:49:10,746 INFO [train.py:611] Epoch 1, batch 1850, loss[loss=0.001793, acc=0.5206, codec_acc=0.4274, codec_topk_acc=0.9136, codec_loss=0.0008624, text_loss=0.0009301, over 2223.00 frames. ], tot_loss[loss=0.001734, acc=0.5598,
2025-04-24 14:49:36,203 INFO [train.py:611] Epoch 1, batch 1900, loss[loss=0.001487, acc=0.6507, codec_acc=0.4261, codec_topk_acc=0.9121, codec_loss=0.0008402, text_loss=0.0006471, over 2299.00 frames. ], tot_loss[loss=0.001726,
2025-04-24 14:50:03,816 INFO [train.py:611] Epoch 1, batch 1950, loss[loss=0.001869, acc=0.6465, codec_acc=0.4139, codec_topk_acc=0.8906, codec_loss=0.001095, text_loss=0.0007745, over 1854.00 frames. ], tot_loss[loss=0.001735,
2025-04-24 14:50:29,051 INFO [train.py:611] Epoch 1, batch 2000, loss[loss=0.002108, acc=0.5735, codec_acc=0.4364, codec_topk_acc=0.8914, codec_loss=0.001061, text_loss=0.001047, over 1862.00 frames. ], tot_loss[loss=0.001731, acc=0.5559, codec_acc=0.4318, codec_topk_acc=0.9077, codec_loss=0.0008768, text_loss=0.0008544, over 442687.97 frames. ], batch size: 2, lr: 1.00e-04,
2025-04-24 14:51:00,676 INFO [train.py:611] Epoch 1, batch 2050, loss[loss=0.001627, acc=0.5578, codec_acc=0.4381, codec_topk_acc=0.9131, codec_loss=0.0008173, text_loss=0.00081, over 2324.00 frames. ], tot_loss[loss=0.001729, acc=0.5579, codec_acc=0.4323, codec_topk_acc=0.9082, codec_loss=0.0008764, text_loss=0.0008529, over 442203.96 frames. ], batch size: 12, lr: 1.00e-04,
2025-04-24 14:51:37,551 INFO [train.py:611] Epoch 1, batch 2100, loss[loss=0.00177, acc=0.4976, codec_acc=0.4371, codec_topk_acc=0.9148, codec_loss=0.0007989, text_loss=0.000971, over 2363.00 frames. ], tot_loss[loss=0.001716, acc=0.5618, codec_acc=0.4317,
2025-04-24 14:52:09,084 INFO [train.py:611] Epoch 1, batch 2150, loss[loss=0.001678, acc=0.5054, codec_acc=0.4662, codec_topk_acc=0.9186, codec_loss=0.0007454, text_loss=0.0009323, over 2422.00 frames. ], tot_loss[loss=0.001719,
2025-04-24 14:52:44,984 INFO [train.py:611] Epoch 1, batch 2200, loss[loss=0.00166, acc=0.5324, codec_acc=0.4329, codec_topk_acc=0.9122, codec_loss=0.0008061, text_loss=0.0008535, over 2366.00 frames. ], tot_loss[loss=0.001715, acc=0.5635, codec_acc=0.4338, codec_topk_acc=0.9093, codec_loss=0.0008702, text_loss=0.000845, over 443445.20 frames. ], batch size: 10, lr: 1.00e-04,
2025-04-24 14:53:21,043 INFO [train.py:611] Epoch 1, batch 2250, loss[loss=0.001375, acc=0.6151, codec_acc=0.4231, codec_topk_acc=0.8997, codec_loss=0.0008014, text_loss=0.0005733, over 2474.00 frames. ], tot_loss[loss=0.001741, acc=0.5539, codec_acc=0.4327, codec_topk_acc=0.9096, codec_loss=0.0008764, text_loss=0.0008644, over 440768.96 frames. ], batch size: 5, lr: 1.00e-04,
2025-04-24 14:53:47,913 INFO [train.py:611] Epoch 1, batch 2300, loss[loss=0.002629, acc=0.4892, codec_acc=0.4238, codec_topk_acc=0.8914, codec_loss=0.001192, text_loss=0.001437, over 1675.00 frames. ], tot_loss[loss=0.001726,
2025-04-24 14:54:14,035 INFO [train.py:611] Epoch 1, batch 2350, loss[loss=0.001749, acc=0.5368, codec_acc=0.4377, codec_topk_acc=0.9101, codec_loss=0.0008652, text_loss=0.0008834, over 2229.00 frames. ], tot_loss[loss=0.001735, acc=0.5535, codec_acc=0.4341, codec_topk_acc=0.9107, codec_loss=0.0008712, text_loss=0.0008639, over 441623.53 frames. ], batch size: 9, lr: 1.00e-04,
2025-04-24 14:54:39,651 INFO [train.py:611] Epoch 1, batch 2400, loss[loss=0.00156, acc=0.5, codec_acc=0.4366, codec_topk_acc=0.9176, codec_loss=0.0007938, text_loss=0.0007665, over 2376.00 frames. ], tot_loss[loss=0.00173,
2025-04-24 14:55:05,438 INFO [train.py:611] Epoch 1, batch 2450, loss[loss=0.001751, acc=0.4811, codec_acc=0.4411, codec_topk_acc=0.9196, codec_loss=0.0008053, text_loss=0.0009456, over 2306.00 frames. ], tot_loss[loss=0.001728, acc=0.5558, codec_acc=0.4357, codec_topk_acc=0.9114, codec_loss=0.0008676, text_loss=0.0008607, over 441589.58 frames. ], batch size: 11, lr: 1.00e-04,
2025-04-24 14:55:30,613 INFO [train.py:611] Epoch 1, batch 2500, loss[loss=0.002299, acc=0.5385, codec_acc=0.439, codec_topk_acc=0.9084, codec_loss=0.001104, text_loss=0.001195, over 1741.00 frames. ], tot_loss[loss=0.00173,
2025-04-24 14:55:57,043 INFO [train.py:611] Epoch 1, batch 2550, loss[loss=0.001384, acc=0.6747, codec_acc=0.4244, codec_topk_acc=0.8989, codec_loss=0.0008823, text_loss=0.0005017, over 2282.00 frames. ], tot_loss[loss=0.001714, acc=0.5599,
2025-04-24 14:56:21,375 INFO [train.py:611] Epoch 1, batch 2600, loss[loss=0.001562, acc=0.5736, codec_acc=0.439, codec_topk_acc=0.9154, codec_loss=0.0008442, text_loss=0.0007176, over 2216.00 frames. ], tot_loss[loss=0.001715, acc=0.5614, codec_acc=0.4342, codec_topk_acc=0.912, codec_loss=0.0008668, text_loss=0.0008478, over 442403.40 frames. ], batch size: 3, lr: 1.00e-04,
2025-04-24 14:56:46,686 INFO [train.py:611] Epoch 1, batch 2650, loss[loss=0.00144, acc=0.6849, codec_acc=0.4387, codec_topk_acc=0.9126, codec_loss=0.0008335, text_loss=0.0006065, over 2292.00 frames. ], tot_loss[loss=0.001696, acc=0.5648,
2025-04-24 14:57:23,740 INFO [train.py:611] Epoch 1, batch 2700, loss[loss=0.001744, acc=0.5157, codec_acc=0.4385, codec_topk_acc=0.909, codec_loss=0.0008064, text_loss=0.0009378, over 2379.00 frames. ], tot_loss[loss=0.001713,
2025-04-24 14:57:48,769 INFO [train.py:611] Epoch 1, batch 2750, loss[loss=0.001644, acc=0.6798, codec_acc=0.4223, codec_topk_acc=0.9145, codec_loss=0.0009878, text_loss=0.0006563, over 1973.00 frames. ], tot_loss[loss=0.001718, acc=0.5579, codec_acc=0.4364,
2025-04-24 14:58:21,044 INFO [train.py:611] Epoch 1, batch 2800, loss[loss=0.001685, acc=0.5158, codec_acc=0.4377, codec_topk_acc=0.9108, codec_loss=0.0008083, text_loss=0.0008764, over 2372.00 frames. ], tot_loss[loss=0.001716, acc=0.5573, codec_acc=0.4365,
2025-04-24 14:58:45,946 INFO [train.py:611] Epoch 1, batch 2850, loss[loss=0.001593, acc=0.6442, codec_acc=0.4532, codec_topk_acc=0.91, codec_loss=0.0009182, text_loss=0.0006744, over 2044.00 frames. ], tot_loss[loss=0.001715, acc=0.5563, codec_acc=0.4369, codec_topk_acc=0.9152, codec_loss=0.0008579, text_loss=0.0008575, over 442001.26 frames. ], batch size: 5, lr: 1.00e-04,
2025-04-24 14:59:21,978 INFO [train.py:611] Epoch 1, batch 2900, loss[loss=0.001812, acc=0.5803, codec_acc=0.4457, codec_topk_acc=0.9223, codec_loss=0.0008692, text_loss=0.0009433, over 2149.00 frames. ], tot_loss[loss=0.001717, acc=0.5569,
2025-04-24 14:59:58,150 INFO [train.py:611] Epoch 1, batch 2950, loss[loss=0.0019, acc=0.481, codec_acc=0.4239, codec_topk_acc=0.9095, codec_loss=0.0009146, text_loss=0.0009857, over 2135.00 frames. ], tot_loss[loss=0.001713, acc=0.5587,
2025-04-24 15:00:23,232 INFO [train.py:611] Epoch 1, batch 3000, loss[loss=0.001749, acc=0.5203, codec_acc=0.4317, codec_topk_acc=0.9127, codec_loss=0.0008411, text_loss=0.0009076, over 2287.00 frames. ], tot_loss[loss=0.001706,
2025-04-24 15:00:48,359 INFO [train.py:611] Epoch 1, batch 3050, loss[loss=0.001986, acc=0.4397, codec_acc=0.4371, codec_topk_acc=0.916, codec_loss=0.0008536, text_loss=0.001133, over 2226.00 frames. ], tot_loss[loss=0.001699, acc=0.5615,
2025-04-24 15:01:20,156 INFO [train.py:611] Epoch 1, batch 3100, loss[loss=0.001574, acc=0.6303, codec_acc=0.4315, codec_topk_acc=0.9247, codec_loss=0.0008802, text_loss=0.0006943, over 2125.00 frames. ], tot_loss[loss=0.001688, acc=0.5612, codec_acc=0.4383,
2025-04-24 15:01:46,482 INFO [train.py:611] Epoch 1, batch 3150, loss[loss=0.001742, acc=0.4958, codec_acc=0.4585, codec_topk_acc=0.9262, codec_loss=0.000836, text_loss=0.0009065, over 2158.00 frames. ], tot_loss[loss=0.001709, acc=0.5522, codec_acc=0.4389, codec_topk_acc=0.9181, codec_loss=0.0008452, text_loss=0.0008639, over 445498.49 frames. ], batch size: 5, lr: 1.00e-04,
2025-04-24 15:02:14,141 INFO [train.py:611] Epoch 1, batch 3200, loss[loss=0.001709, acc=0.5058, codec_acc=0.4603, codec_topk_acc=0.9349, codec_loss=0.0007659, text_loss=0.0009431, over 2315.00 frames. ], tot_loss[loss=0.001706, acc=0.5529, codec_acc=0.4382, codec_topk_acc=0.9177, codec_loss=0.0008465, text_loss=0.0008596, over 445047.00 frames. ], batch size: 11, lr: 1.00e-04,
2025-04-24 15:02:39,910 INFO [train.py:611] Epoch 1, batch 3250, loss[loss=0.001691, acc=0.4949, codec_acc=0.4385, codec_topk_acc=0.9248, codec_loss=0.0008047, text_loss=0.0008859, over 2308.00 frames. ], tot_loss[loss=0.001712, acc=0.5574,
2025-04-24 15:03:06,119 INFO [train.py:611] Epoch 1, batch 3300, loss[loss=0.001589, acc=0.5757, codec_acc=0.4465, codec_topk_acc=0.9324, codec_loss=0.0008131, text_loss=0.0007763, over 2214.00 frames. ], tot_loss[loss=0.001705, acc=0.5537, codec_acc=0.4383, codec_topk_acc=0.9186, codec_loss=0.0008461, text_loss=0.0008593, over 444375.82 frames. ], batch size: 8, lr: 1.00e-04,
2025-04-24 15:03:32,131 INFO [train.py:611] Epoch 1, batch 3350, loss[loss=0.001433, acc=0.7398, codec_acc=0.433, codec_topk_acc=0.9167, codec_loss=0.000993, text_loss=0.0004402, over 1875.00 frames. ], tot_loss[loss=0.0017, acc=0.5566, codec_acc=0.4397,
2025-04-24 15:03:57,386 INFO [train.py:611] Epoch 1, batch 3400, loss[loss=0.001746, acc=0.6442, codec_acc=0.4571, codec_topk_acc=0.9187, codec_loss=0.0009881, text_loss=0.0007575, over 1851.00 frames. ], tot_loss[loss=0.001698, acc=0.5559, codec_acc=0.4389, codec_topk_acc=0.9196, codec_loss=0.0008429, text_loss=0.0008547, over 444751.64 frames. ], batch size: 2, lr: 1.00e-04,
2025-04-24 15:04:28,069 INFO [train.py:611] Epoch 1, batch 3450, loss[loss=0.001753, acc=0.6809, codec_acc=0.4043, codec_topk_acc=0.9051, codec_loss=0.001076, text_loss=0.0006766, over 1863.00 frames. ], tot_loss[loss=0.0017, acc=0.555, codec_acc=0.4392, codec_topk_acc=0.9197, codec_loss=0.0008435, text_loss=0.0008565, over 444717.31 frames. ], batch size: 3, lr: 1.00e-04,
2025-04-24 15:05:10,515 INFO [train.py:611] Epoch 1, batch 3500, loss[loss=0.001865, acc=0.5588, codec_acc=0.423, codec_topk_acc=0.9227, codec_loss=0.000919, text_loss=0.0009461, over 2080.00 frames. ], tot_loss[loss=0.001701, acc=0.5575, codec_acc=0.4392,
2025-04-24 15:05:36,752 INFO [train.py:611] Epoch 1, batch 3550, loss[loss=0.001925, acc=0.4515, codec_acc=0.4442, codec_topk_acc=0.9163, codec_loss=0.0008608, text_loss=0.001064, over 2204.00 frames. ], tot_loss[loss=0.001704, acc=0.5544,
2025-04-24 15:06:18,419 INFO [train.py:611] Epoch 1, batch 3600, loss[loss=0.002197, acc=0.4966, codec_acc=0.4454, codec_topk_acc=0.9284, codec_loss=0.0009798, text_loss=0.001217, over 1852.00 frames. ], tot_loss[loss=0.001705, acc=0.5564,
2025-04-24 15:06:45,456 INFO [train.py:611] Epoch 1, batch 3650, loss[loss=0.001772, acc=0.5153, codec_acc=0.44, codec_topk_acc=0.921, codec_loss=0.0008071, text_loss=0.000965, over 2317.00 frames. ], tot_loss[loss=0.001713, acc=0.5514, codec_acc=0.4407, codec_topk_acc=0.9223,
2025-04-24 15:07:11,609 INFO [train.py:611] Epoch 1, batch 3700, loss[loss=0.00163, acc=0.5334, codec_acc=0.439, codec_topk_acc=0.9256, codec_loss=0.0007731, text_loss=0.0008572, over 2389.00 frames. ], tot_loss[loss=0.001691, acc=0.5579,
2025-04-24 15:07:42,640 INFO [train.py:611] Epoch 1, batch 3750, loss[loss=0.00203, acc=0.5682, codec_acc=0.4051, codec_topk_acc=0.9024, codec_loss=0.001053, text_loss=0.000977, over 1941.00 frames. ], tot_loss[loss=0.001696, acc=0.5575, codec_acc=0.4396, codec_topk_acc=0.9218, codec_loss=0.0008422, text_loss=0.0008539, over 442718.76 frames. ], batch size: 4, lr: 1.00e-04,
2025-04-24 15:08:07,998 INFO [train.py:611] Epoch 1, batch 3800, loss[loss=0.001547, acc=0.5339, codec_acc=0.4617, codec_topk_acc=0.9445, codec_loss=0.0007214, text_loss=0.0008255, over 2385.00 frames. ], tot_loss[loss=0.001688, acc=0.5611, codec_acc=0.4393, codec_topk_acc=0.922, codec_loss=0.0008423, text_loss=0.0008454, over 442564.96 frames. ], batch size: 4, lr: 1.00e-04,
2025-04-24 15:08:33,282 INFO [train.py:611] Epoch 1, batch 3850, loss[loss=0.001613, acc=0.6111, codec_acc=0.4309, codec_topk_acc=0.9259, codec_loss=0.0008629, text_loss=0.00075, over 2175.00 frames. ], tot_loss[loss=0.001696, acc=0.5584, codec_acc=0.4397, codec_topk_acc=0.9215, codec_loss=0.0008407, text_loss=0.0008557, over 444089.47 frames. ], batch size: 5, lr: 1.00e-04,
2025-04-24 15:08:58,875 INFO [train.py:611] Epoch 1, batch 3900, loss[loss=0.002002, acc=0.5714, codec_acc=0.446, codec_topk_acc=0.8954, codec_loss=0.001059, text_loss=0.0009422, over 1845.00 frames. ], tot_loss[loss=0.001689, acc=0.5576, codec_acc=0.4403,
2025-04-24 15:09:24,605 INFO [train.py:611] Epoch 1, batch 3950, loss[loss=0.001839, acc=0.5825, codec_acc=0.4333, codec_topk_acc=0.9224, codec_loss=0.0009379, text_loss=0.0009011, over 2023.00 frames. ], tot_loss[loss=0.0017, acc=0.5538, codec_acc=0.4404,
2025-04-24 15:09:48,877 INFO [train.py:611] Epoch 1, batch 4000, loss[loss=0.001891, acc=0.4537, codec_acc=0.4504, codec_topk_acc=0.9287, codec_loss=0.0008209, text_loss=0.00107, over 2194.00 frames. ], tot_loss[loss=0.001672, acc=0.5646, codec_acc=0.4397, codec_topk_acc=0.9227, codec_loss=0.0008375, text_loss=0.0008348, over 444410.82 frames. ], batch size: 9, lr: 1.00e-04,
2025-04-24 15:10:19,443 INFO [train.py:611] Epoch 1, batch 4050, loss[loss=0.00167, acc=0.5792, codec_acc=0.4547, codec_topk_acc=0.9326, codec_loss=0.0008063, text_loss=0.0008632, over 2210.00 frames. ], tot_loss[loss=0.001673, acc=0.5634, codec_acc=0.4407, codec_topk_acc=0.9237, codec_loss=0.000834, text_loss=0.0008387, over 444987.91 frames. ], batch size: 9, lr: 1.00e-04,
2025-04-24 15:10:50,508 INFO [train.py:611] Epoch 1, batch 4100, loss[loss=0.001536, acc=0.5501, codec_acc=0.4422, codec_topk_acc=0.9312, codec_loss=0.0007526, text_loss=0.0007836, over 2409.00 frames. ], tot_loss[loss=0.001677, acc=0.5617, codec_acc=0.4403, codec_topk_acc=0.924, codec_loss=0.0008351, text_loss=0.0008418, over 444046.85 frames. ], batch size: 7, lr: 1.00e-04,
2025-04-24 15:11:15,850 INFO [train.py:611] Epoch 1, batch 4150, loss[loss=0.001713, acc=0.5658, codec_acc=0.4514, codec_topk_acc=0.9324, codec_loss=0.0008262, text_loss=0.0008864, over 2155.00 frames. ], tot_loss[loss=0.001683, acc=0.5581, codec_acc=0.4403, codec_topk_acc=0.9249,
2025-04-24 15:11:51,124 INFO [train.py:611] Epoch 1, batch 4200, loss[loss=0.002046, acc=0.5033, codec_acc=0.4281, codec_topk_acc=0.9228, codec_loss=0.0009616, text_loss=0.001084, over 1957.00 frames. ], tot_loss[loss=0.001683, acc=0.5625, codec_acc=0.4398, codec_topk_acc=0.924, codec_loss=0.0008395, text_loss=0.0008432, over 441862.39 frames. ], batch size: 16, lr: 1.00e-04,
2025-04-24 15:12:15,825 INFO [train.py:611] Epoch 1, batch 4250, loss[loss=0.001788, acc=0.506, codec_acc=0.4427, codec_topk_acc=0.9303, codec_loss=0.0007825, text_loss=0.001005, over 2328.00 frames. ], tot_loss[loss=0.001678, acc=0.5628, codec_acc=0.4406,
2025-04-24 15:12:51,675 INFO [train.py:611] Epoch 1, batch 4300, loss[loss=0.001636, acc=0.5651, codec_acc=0.4366, codec_topk_acc=0.9179, codec_loss=0.0008434, text_loss=0.0007923, over 2212.00 frames. ], tot_loss[loss=0.001693, acc=0.5601, codec_acc=0.4407,
2025-04-24 15:13:24,093 INFO [train.py:611] Epoch 1, batch 4350, loss[loss=0.001474, acc=0.8227, codec_acc=0.4377, codec_topk_acc=0.9241, codec_loss=0.001116, text_loss=0.0003577, over 1687.00 frames. ], tot_loss[loss=0.001682, acc=0.56,
2025-04-24 15:13:50,337 INFO [train.py:611] Epoch 1, batch 4400, loss[loss=0.001669, acc=0.5383, codec_acc=0.439, codec_topk_acc=0.9321, codec_loss=0.000788, text_loss=0.0008814, over 2321.00 frames. ], tot_loss[loss=0.001685, acc=0.5578, codec_acc=0.4401, codec_topk_acc=0.9255, codec_loss=0.0008312, text_loss=0.0008534, over 444695.76 frames. ], batch size: 12, lr: 1.00e-04,
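Several records above were clobbered when two writers interleaved on the same log file (the truncated tot_loss[...] blocks and fused timestamps), so anything consuming this log should parse defensively. Below is a throwaway parser for the per-batch running averages, assuming the exact "Epoch .., batch .., ... tot_loss[...]" format printed here; it is a hypothetical helper, not part of the training code.

import re

# Pulls (batch, running loss, running acc) from train.py:611 lines; records
# truncated before the acc= field simply fail to match and are skipped.
TOT = re.compile(r"batch (\d+), .*?tot_loss\[loss=([0-9.e+-]+), acc=([0-9.]+)")

def parse_running_stats(log_path: str):
    rows = []
    with open(log_path, encoding="utf-8", errors="replace") as f:
        for line in f:
            m = TOT.search(line)
            if m:
                rows.append((int(m.group(1)), float(m.group(2)), float(m.group(3))))
    return rows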