{ "alpha_pattern": {}, "auto_mapping": null, "base_model_name_or_path": "/home/jusheng/shihao/qwen_7B_finetune/models/Qwen-7B-Chat", "bias": "none", "corda_config": null, "eva_config": null, "exclude_modules": null, "fan_in_fan_out": false, "inference_mode": true, "init_lora_weights": true, "layer_replication": null, "layers_pattern": null, "layers_to_transform": null, "loftq_config": {}, "lora_alpha": 16, "lora_bias": false, "lora_dropout": 0.05, "megatron_config": null, "megatron_core": "megatron.core", "modules_to_save": null, "peft_type": "LORA", "qalora_group_size": 16, "r": 16, "rank_pattern": {}, "revision": null, "target_modules": [ "w1", "c_attn", "w2", "c_proj" ], "task_type": "CAUSAL_LM", "trainable_token_indices": null, "use_dora": false, "use_qalora": false, "use_rslora": false }