# diffusion-pipe training configuration for a Wan2.1 i2v LoRA.
# Root-table keys: run-wide paths and training schedule.
# NOTE(review): the original file had a trailing " |" on every line
# (markdown-table residue), which made it unparseable TOML — removed.

output_dir = "/workspace/ComfyUI/models/loras/out"
dataset = "/workspace/configs/dataset_wan.toml"

# --- Training schedule ---
epochs = 1000
micro_batch_size_per_gpu = 1
pipeline_stages = 1
gradient_accumulation_steps = 1
gradient_clipping = 1.0
warmup_steps = 40

# --- Memory / parallelism ---
activation_checkpointing = true
partition_method = "parameters"
# Transformer blocks offloaded to CPU to reduce VRAM pressure — presumably
# trades throughput for memory; TODO confirm against the trainer's docs.
blocks_to_swap = 20

# --- Checkpointing & logging cadence ---
save_dtype = "bfloat16"
caching_batch_size = 1
steps_per_print = 1
video_clip_mode = "single_beginning"
save_every_n_epochs = 10
checkpoint_every_n_minutes = 120

# --- Evaluation cadence ---
eval_every_n_epochs = 1
eval_before_first_step = true
eval_micro_batch_size_per_gpu = 1
eval_gradient_accumulation_steps = 1

# Base model to fine-tune.
[model]
type = "wan"
# Base Wan2.1 checkpoint directory (auxiliary components are presumably
# loaded from here — TODO confirm which files the trainer expects inside).
ckpt_path = "/workspace/Wan2.1"
# Quoting normalized to basic strings for file-wide consistency; the paths
# contain no characters that need literal-string escaping.
transformer_path = "/workspace/ComfyUI/models/diffusion_models/wan2.1_i2v_480p_14B_bf16.safetensors"
llm_path = "/workspace/ComfyUI/models/text_encoders/umt5-xxl-enc-bf16.safetensors"
dtype = "bfloat16"
timestep_sample_method = "logit_normal"

# LoRA adapter settings — only the adapter weights are trained and saved.
[adapter]
type = "lora"
rank = 32
dtype = "bfloat16"

[optimizer]
type = "adamw_optimi"
lr = 1e-5
# Array formatting fixed: the original "[ 0.9, 0.99,]" (stray spaces and a
# trailing comma on a one-line array) is non-idiomatic TOML; values unchanged.
betas = [0.9, 0.99]
weight_decay = 0.01

[monitoring]
enable_wandb = true
# SECURITY: a live-looking W&B API key is committed here in plain text.
# Rotate this key and provide it out-of-band (e.g. the WANDB_API_KEY
# environment variable) instead — TOML performs no env interpolation, so
# whether the trainer falls back to the env var when this key is omitted
# must be confirmed against its docs. Value left unchanged here so the run
# still works until the key is rotated.
wandb_api_key = "f46df1bb828b735bd22f94fff1be190ba5e046f9"
wandb_tracker_name = "wan-lora"
wandb_run_name = "wan-lora"