{
"id": "21bb3948-55c3-4efb-a330-504b41faf2a9",
"revision": 0,
"last_node_id": 191,
"last_link_id": 344,
"nodes": [
{
"id": 126,
"type": "Note",
"pos": [
-278.7073974609375,
-374.02386474609375
],
"size": [
250.90176391601562,
147.25364685058594
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"widget_ue_connectable": {}
},
"widgets_values": [
"If you have 32GB RAM but only 12/16GB or even less VRAM, you might want to use the \"Low Vram high ram\" loader to offload parts of the model to prevent out-of-memory errors. You can use this on any card; it can be useful even on a 4090. Be careful with only 16GB RAM, though — you might need to lower the virtual VRAM GB setting."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 127,
"type": "Note",
"pos": [
-13.594443321228027,
-380.0921630859375
],
"size": [
250.90176391601562,
88
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"widget_ue_connectable": {}
},
"widgets_values": [
"Feel free to enable these two optimizations if you have updated torch, and, for the one on the left, Sage Attention installed."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 103,
"type": "Note",
"pos": [
140,
0
],
"size": [
440,
100
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"widget_ue_connectable": {}
},
"widgets_values": [
"Since umt5 is a variant based on T5, the input of prompt supports multiple languages.\n\n---\n\n由于 umt5 是基于 T5 的变体,所以提示词输入是支持多种语言输入的"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 153,
"type": "Note",
"pos": [
-818.7584228515625,
512.3826293945312
],
"size": [
210,
128.9481201171875
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"widget_ue_connectable": {}
},
"widgets_values": [
"You can choose which clip loader you want to use; you can use GGUFs and safetensors with the same loader. If you have a second GPU, use the multi GPU loader to offload clip to the second one. If you don't have one, just use the normal one."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 39,
"type": "VAELoader",
"pos": [
-274.67108154296875,
392.6246337890625
],
"size": [
390,
60
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"slot_index": 0,
"links": [
76,
307
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "VAELoader",
"models": [
{
"name": "wan_2.1_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors",
"hash": "2fc39d31359a4b0a64f55876d8ff7fa8d780956ae2cb13463b0223e15148976b",
"hash_type": "SHA256",
"directory": "vae"
}
],
"widget_ue_connectable": {}
},
"widgets_values": [
"wan\\wan_2.1_vae.safetensors"
]
},
{
"id": 151,
"type": "CLIPLoaderGGUF",
"pos": [
-596.4228515625,
512.4306030273438
],
"size": [
270,
82
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [
268
]
}
],
"properties": {
"cnr_id": "ComfyUI-GGUF",
"ver": "a2b75978fd50c0227a58316619b79d525b88e570",
"Node name for S&R": "CLIPLoaderGGUF",
"widget_ue_connectable": {}
},
"widgets_values": [
"wan\\umt5-xxl-encoder-Q6_K.gguf",
"wan"
]
},
{
"id": 129,
"type": "CLIPLoaderGGUFMultiGPU",
"pos": [
-596.1917114257812,
634.8804321289062
],
"size": [
274.9546813964844,
106
],
"flags": {},
"order": 6,
"mode": 4,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": []
}
],
"properties": {
"cnr_id": "comfyui-multigpu",
"ver": "a05823ff0a5296332ae478b18ab93b46cd996a44",
"Node name for S&R": "CLIPLoaderGGUFMultiGPU",
"widget_ue_connectable": {}
},
"widgets_values": [
"umt5-xxl-encoder-Q8_0.gguf",
"wan",
"cuda:1"
]
},
{
"id": 174,
"type": "LoadImage",
"pos": [
50,
1070
],
"size": [
320,
350
],
"flags": {},
"order": 7,
"mode": 4,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": []
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "LoadImage",
"widget_ue_connectable": {}
},
"widgets_values": [
"ref11.png",
"image"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
146.3043975830078,
393.2663269042969
],
"size": [
425.27801513671875,
180.6060791015625
],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 182
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"slot_index": 0,
"links": [
303,
306
]
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "CLIPTextEncode",
"widget_ue_connectable": {}
},
"widgets_values": [
"色调艳丽,过曝,静态,细节模糊不清,字幕,风格,作品,画作,画面,静止,整体发灰,最差质量,低质量,JPEG压缩残留,丑陋的,残缺的,多余的手指,画得不好的手部,画得不好的脸部,畸形的,毁容的,形态畸形的肢体,手指融合,静止不动的画面,杂乱的背景,三条腿,背景人很多,倒着走"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 108,
"type": "PathchSageAttentionKJ",
"pos": [
-280,
-110
],
"size": [
270,
58
],
"flags": {},
"order": 23,
"mode": 4,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 201
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
187
]
}
],
"properties": {
"cnr_id": "comfyui-kjnodes",
"ver": "5dcda71011870278c35d92ff77a677ed2e538f2d",
"Node name for S&R": "PathchSageAttentionKJ",
"widget_ue_connectable": {}
},
"widgets_values": [
"sageattn_qk_int8_pv_fp8_cuda"
]
},
{
"id": 154,
"type": "UNETLoader",
"pos": [
-600,
-670
],
"size": [
270,
82
],
"flags": {},
"order": 8,
"mode": 4,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
269
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "UNETLoader",
"widget_ue_connectable": {}
},
"widgets_values": [
"wan\\Wan2_1-Wan-I2V-MAGREF-14B_fp8_e4m3fn.safetensors",
"fp8_e4m3fn_fast"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
620,
460
],
"size": [
210,
46
],
"flags": {
"collapsed": false
},
"order": 36,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 263
},
{
"name": "vae",
"type": "VAE",
"link": 76
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"slot_index": 0,
"links": [
130,
189
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "VAEDecode",
"widget_ue_connectable": {}
},
"widgets_values": []
},
{
"id": 186,
"type": "CLIPVisionLoader",
"pos": [
-590,
790
],
"size": [
270,
58
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"links": [
313
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.43",
"Node name for S&R": "CLIPVisionLoader",
"widget_ue_connectable": {}
},
"widgets_values": [
"wan\\clip_vision_h.safetensors"
]
},
{
"id": 187,
"type": "CLIPVisionEncode",
"pos": [
620,
730
],
"size": [
290.390625,
78
],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 313
},
{
"name": "image",
"type": "IMAGE",
"link": 337
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [
314
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.43",
"Node name for S&R": "CLIPVisionEncode",
"widget_ue_connectable": {}
},
"widgets_values": [
"none"
]
},
{
"id": 173,
"type": "LoadImage",
"pos": [
-280,
1070
],
"size": [
320,
350
],
"flags": {},
"order": 10,
"mode": 4,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": []
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "LoadImage",
"widget_ue_connectable": {}
},
"widgets_values": [
"ref10.png",
"image"
]
},
{
"id": 124,
"type": "Any Switch (rgthree)",
"pos": [
-280,
-180
],
"size": [
250.08045959472656,
106
],
"flags": {
"collapsed": true
},
"order": 21,
"mode": 0,
"inputs": [
{
"dir": 3,
"name": "any_01",
"type": "MODEL",
"link": 204
},
{
"dir": 3,
"name": "any_02",
"type": "MODEL",
"link": 205
},
{
"dir": 3,
"name": "any_03",
"type": "MODEL",
"link": 269
},
{
"dir": 3,
"name": "any_04",
"type": "MODEL",
"link": null
},
{
"name": "any_05",
"type": "MODEL",
"link": null
}
],
"outputs": [
{
"dir": 4,
"label": "MODEL",
"name": "*",
"shape": 3,
"type": "MODEL",
"links": [
201
]
}
],
"title": "Model switch",
"properties": {
"cnr_id": "rgthree-comfy",
"ver": "aa6c75a30b3ee8f01d7c9f8b0a126cccdc90616a",
"widget_ue_connectable": {}
},
"widgets_values": []
},
{
"id": 111,
"type": "ModelPatchTorchSettings",
"pos": [
0,
-110
],
"size": [
269.99810791015625,
58
],
"flags": {},
"order": 26,
"mode": 4,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 187
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
316
]
}
],
"properties": {
"cnr_id": "comfyui-kjnodes",
"ver": "5dcda71011870278c35d92ff77a677ed2e538f2d",
"Node name for S&R": "ModelPatchTorchSettings",
"widget_ue_connectable": {}
},
"widgets_values": [
true
]
},
{
"id": 188,
"type": "PatchModelPatcherOrder",
"pos": [
310,
-130
],
"size": [
270,
82
],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 316
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
317
]
}
],
"properties": {
"cnr_id": "comfyui-kjnodes",
"ver": "1.1.2",
"Node name for S&R": "PatchModelPatcherOrder",
"widget_ue_connectable": {}
},
"widgets_values": [
"weight_patch_first",
"auto"
]
},
{
"id": 168,
"type": "LoadImage",
"pos": [
50,
680
],
"size": [
320,
350
],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
324
]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "LoadImage",
"widget_ue_connectable": {}
},
"widgets_values": [
"2.jpeg",
"image"
]
},
{
"id": 172,
"type": "LoadImage",
"pos": [
-280,
680
],
"size": [
320,
350
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
333
]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "LoadImage",
"widget_ue_connectable": {}
},
"widgets_values": [
"1.jpeg",
"image"
]
},
{
"id": 190,
"type": "ImageResizeKJv2",
"pos": [
-530,
900
],
"size": [
210,
266
],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 333
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
334
]
},
{
"name": "width",
"type": "INT",
"links": null
},
{
"name": "height",
"type": "INT",
"links": null
}
],
"properties": {
"cnr_id": "comfyui-kjnodes",
"ver": "1.1.2",
"Node name for S&R": "ImageResizeKJv2",
"widget_ue_connectable": {}
},
"widgets_values": [
512,
512,
"nearest-exact",
"pad",
"255, 255, 255",
"center",
2,
"cpu"
]
},
{
"id": 159,
"type": "PreviewImage",
"pos": [
390,
840
],
"size": [
210,
460
],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 335
}
],
"outputs": [],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "PreviewImage",
"widget_ue_connectable": {}
},
"widgets_values": []
},
{
"id": 189,
"type": "ImageConcatMulti",
"pos": [
390,
640
],
"size": [
210,
150
],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "image_1",
"type": "IMAGE",
"link": 334
},
{
"name": "image_2",
"type": "IMAGE",
"link": 324
}
],
"outputs": [
{
"name": "images",
"type": "IMAGE",
"links": [
335,
336,
337
]
}
],
"properties": {
"cnr_id": "comfyui-kjnodes",
"ver": "1.1.1",
"widget_ue_connectable": {}
},
"widgets_values": [
2,
"right",
true,
null
],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 184,
"type": "WanImageToVideo",
"pos": [
620,
200
],
"size": [
270,
210
],
"flags": {},
"order": 31,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 305
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 306
},
{
"name": "vae",
"type": "VAE",
"link": 307
},
{
"name": "clip_vision_output",
"shape": 7,
"type": "CLIP_VISION_OUTPUT",
"link": 314
},
{
"name": "start_image",
"shape": 7,
"type": "IMAGE",
"link": 336
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [
308
]
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [
309,
311
]
},
{
"name": "latent",
"type": "LATENT",
"links": [
312,
338
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.43",
"Node name for S&R": "WanImageToVideo",
"widget_ue_connectable": {}
},
"widgets_values": [
832,
480,
17,
1
]
},
{
"id": 121,
"type": "Note",
"pos": [
970,
-100
],
"size": [
210,
88
],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"widget_ue_connectable": {}
},
"widgets_values": [
"Test around with the shift value to get your preferred results; you can also disable it."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 191,
"type": "WanVideoEnhanceAVideoKJ",
"pos": [
610,
20
],
"size": [
326.1216735839844,
78
],
"flags": {},
"order": 32,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 342
},
{
"name": "latent",
"type": "LATENT",
"link": 338
}
],
"outputs": [
{
"name": "model",
"type": "MODEL",
"links": [
340
]
}
],
"properties": {
"cnr_id": "comfyui-kjnodes",
"ver": "1.1.2",
"Node name for S&R": "WanVideoEnhanceAVideoKJ",
"widget_ue_connectable": {}
},
"widgets_values": [
0.20000000000000004
]
},
{
"id": 109,
"type": "Power Lora Loader (rgthree)",
"pos": [
-262.5567626953125,
40.112430572509766
],
"size": [
370,
262
],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"dir": 3,
"name": "model",
"type": "MODEL",
"link": 317
},
{
"dir": 3,
"name": "clip",
"type": "CLIP",
"link": 268
}
],
"outputs": [
{
"dir": 4,
"name": "MODEL",
"shape": 3,
"type": "MODEL",
"links": [
342
]
},
{
"dir": 4,
"name": "CLIP",
"shape": 3,
"type": "CLIP",
"links": [
182,
186
]
}
],
"properties": {
"cnr_id": "rgthree-comfy",
"ver": "6c5f7c95ed8487fe08e42bd0341cae6dc9c0f0ad",
"Show Strengths": "Single Strength",
"widget_ue_connectable": {}
},
"widgets_values": [
{},
{
"type": "PowerLoraLoaderHeaderWidget"
},
{
"on": true,
"lora": "wan\\Wan21_T2V_14B_lightx2v_cfg_step_distill_lora_rank32.safetensors",
"strength": 1,
"strengthTwo": null
},
{},
""
]
},
{
"id": 182,
"type": "WanVideoNAG",
"pos": [
620,
560
],
"size": [
270,
126
],
"flags": {},
"order": 34,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 343
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 303
}
],
"outputs": [
{
"name": "model",
"type": "MODEL",
"links": [
344
]
}
],
"properties": {
"cnr_id": "comfyui-kjnodes",
"ver": "1.1.2",
"Node name for S&R": "WanVideoNAG",
"widget_ue_connectable": {}
},
"widgets_values": [
11,
0.25,
2.5
]
},
{
"id": 125,
"type": "UnetLoaderGGUF",
"pos": [
-598.3447875976562,
-405.9792785644531
],
"size": [
270,
58
],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
204
]
}
],
"properties": {
"cnr_id": "comfyui-gguf",
"ver": "6570efec6992015085f11b84e42d32f6cc71e8b7",
"Node name for S&R": "UnetLoaderGGUF",
"widget_ue_connectable": {}
},
"widgets_values": [
"wan\\MAGREF_Wan2.1_I2V_14B-Q6_K.gguf"
]
},
{
"id": 107,
"type": "UnetLoaderGGUFDisTorchMultiGPU",
"pos": [
-634.0736694335938,
-207.10665893554688
],
"size": [
327.8580017089844,
154
],
"flags": {},
"order": 15,
"mode": 4,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
205
]
}
],
"properties": {
"cnr_id": "comfyui-multigpu",
"ver": "a05823ff0a5296332ae478b18ab93b46cd996a44",
"Node name for S&R": "UnetLoaderGGUFDisTorchMultiGPU",
"widget_ue_connectable": {}
},
"widgets_values": [
"wan\\MAGREF_Wan2.1_I2V_14B-Q6_K.gguf",
"cuda:0",
24,
false,
""
]
},
{
"id": 176,
"type": "Note",
"pos": [
-280,
1470
],
"size": [
660,
120
],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"widget_ue_connectable": {}
},
"widgets_values": [
"Multi-Image to Video\n\nBlend multiple images into a single video.\n\nYou can use fewer than 3 images; the model will adapt.\n\nTo skip an input, right-click the unused image node and select Bypass (Ctrl + B)."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "ModelSamplingSD3",
"pos": [
970,
40
],
"size": [
210,
58
],
"flags": {},
"order": 33,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 340
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
343
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "ModelSamplingSD3",
"widget_ue_connectable": {}
},
"widgets_values": [
8.000000000000002
]
},
{
"id": 149,
"type": "KSampler",
"pos": [
923.8870849609375,
194.55450439453125
],
"size": [
400,
495.23077392578125
],
"flags": {},
"order": 35,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 344
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 308
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 309
},
{
"name": "latent_image",
"type": "LATENT",
"link": 312
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
263
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.39",
"Node name for S&R": "KSampler",
"widget_ue_connectable": {}
},
"widgets_values": [
36695669684651,
"randomize",
4,
1,
"euler",
"simple",
1
]
},
{
"id": 123,
"type": "Fast Groups Bypasser (rgthree)",
"pos": [
-280,
-530
],
"size": [
252.24124145507812,
106
],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "OPT_CONNECTION",
"type": "*",
"links": null
}
],
"properties": {
"matchColors": "purple",
"matchTitle": "",
"showNav": true,
"sort": "position",
"customSortAlphabet": "",
"toggleRestriction": "max one",
"widget_ue_connectable": {}
},
"color": "#323",
"bgcolor": "#535"
},
{
"id": 150,
"type": "Fast Groups Bypasser (rgthree)",
"pos": [
-13.024696350097656,
-261.1021728515625
],
"size": [
218.0250244140625,
82
],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "OPT_CONNECTION",
"type": "*",
"links": null
}
],
"title": "Optimizations",
"properties": {
"matchColors": "black",
"matchTitle": "",
"showNav": true,
"sort": "position",
"customSortAlphabet": "",
"toggleRestriction": "default",
"widget_ue_connectable": {}
},
"color": "#222",
"bgcolor": "#000"
},
{
"id": 77,
"type": "MarkdownNote",
"pos": [
-990,
0
],
"size": [
690,
470
],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"widget_ue_connectable": {}
},
"widgets_values": [
"\n\n**diffusion_models** \n- [MAGREF_Wan2.1_I2V_14B-GGUF](https://huggingface.co/QuantStack/MAGREF_Wan2.1_I2V_14B-GGUF/tree/main)\n\n**VAE**\n- [wan_2.1_vae.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors?download=true)\n\n**Text encoders** Choose one of the following models\n- [umt5_xxl_fp16.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp16.safetensors?download=true)\n- [umt5_xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors?download=true)\n- [umt5-xxl-encoder-gguf](https://huggingface.co/city96/umt5-xxl-encoder-gguf/tree/main)\n\n> You can choose between fp16, fp8 and ggufs.\n\n**CLIP Vision** \n- [clip_vision_h.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/blob/main/split_files/clip_vision/clip_vision_h.safetensors)\n\n**LoRA (Optional for speed optimization)**\n- [LightX2V](https://huggingface.co/Kijai/WanVideo_comfy/blob/main/Wan21_T2V_14B_lightx2v_cfg_step_distill_lora_rank32.safetensors)\n\nFile save location\n\n```\nComfyUI/\n├── models/\n│ ├── loras/\n│ │ └─── Wan21_T2V_14B_lightx2v_cfg_step_distill_lora_rank32.safetensors\n│ ├── unet/\n│ │ └─── MAGREF_Wan2.1_14B-Qx_x_x.gguf\n│ ├── text_encoders/\n│ │ └─── umt5_xxl_fp8_e4m3fn_scaled.safetensors # or GGUF (umt5-xxl-encoder-Qx_x.gguf)\n│ ├── clip_vision/\n│ │ └─── clip_vision_h.safetensors\n│ └── vae/\n│ └─── wan_2.1_vae.safetensors\n\n```\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
149.87985229492188,
191.60894775390625
],
"size": [
422.84503173828125,
164.31304931640625
],
"flags": {},
"order": 30,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 186
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"slot_index": 0,
"links": [
305
]
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "CLIPTextEncode",
"widget_ue_connectable": {}
},
"widgets_values": [
"Two men taking a selfie together in an indoor setting. One of them, with a bright and expressive smile, holds the smartphone at arm’s length to frame the shot. He has voluminous, natural-textured hair and appears enthusiastic and energetic. Standing beside him is another man with neatly styled hair and a composed expression, wearing a white athletic jersey with black accents."
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 112,
"type": "VHS_VideoCombine",
"pos": [
1361.6483154296875,
191.31951904296875
],
"size": [
714.7343139648438,
748.8082275390625
],
"flags": {},
"order": 38,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 189
},
{
"name": "audio",
"shape": 7,
"type": "AUDIO",
"link": null
},
{
"name": "meta_batch",
"shape": 7,
"type": "VHS_BatchManager",
"link": null
},
{
"name": "vae",
"shape": 7,
"type": "VAE",
"link": null
}
],
"outputs": [
{
"name": "Filenames",
"type": "VHS_FILENAMES",
"links": null
}
],
"properties": {
"cnr_id": "comfyui-videohelpersuite",
"ver": "1.6.1",
"Node name for S&R": "VHS_VideoCombine",
"widget_ue_connectable": {}
},
"widgets_values": {
"frame_rate": 16,
"loop_count": 0,
"filename_prefix": "magref_14b",
"format": "video/nvenc_h264-mp4",
"pix_fmt": "yuv420p",
"bitrate": 10,
"megabit": true,
"save_metadata": true,
"pingpong": false,
"save_output": true,
"videopreview": {
"hidden": false,
"paused": false,
"params": {
"filename": "magref_14b_00001.mp4",
"subfolder": "",
"type": "output",
"format": "video/nvenc_h264-mp4",
"frame_rate": 16,
"workflow": "magref_14b_00001.png",
"fullpath": "C:\\AI\\ComfyUI_windows_portable_nvidia\\ComfyUI_windows_portable\\ComfyUI\\output\\magref_14b_00001.mp4"
}
}
}
},
{
"id": 70,
"type": "SaveAnimatedWEBP",
"pos": [
2110,
200
],
"size": [
670,
780
],
"flags": {},
"order": 37,
"mode": 4,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 130
}
],
"outputs": [],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.34",
"Node name for S&R": "SaveAnimatedWEBP",
"widget_ue_connectable": {}
},
"widgets_values": [
"magref_14b",
16.000000000000004,
true,
100,
"default"
]
}
],
"links": [
[
76,
39,
0,
8,
1,
"VAE"
],
[
130,
8,
0,
70,
0,
"IMAGE"
],
[
182,
109,
1,
7,
0,
"CLIP"
],
[
186,
109,
1,
6,
0,
"CLIP"
],
[
187,
108,
0,
111,
0,
"MODEL"
],
[
189,
8,
0,
112,
0,
"IMAGE"
],
[
201,
124,
0,
108,
0,
"MODEL"
],
[
204,
125,
0,
124,
0,
"MODEL"
],
[
205,
107,
0,
124,
1,
"MODEL"
],
[
263,
149,
0,
8,
0,
"LATENT"
],
[
268,
151,
0,
109,
1,
"CLIP"
],
[
269,
154,
0,
124,
2,
"MODEL"
],
[
303,
7,
0,
182,
1,
"CONDITIONING"
],
[
305,
6,
0,
184,
0,
"CONDITIONING"
],
[
306,
7,
0,
184,
1,
"CONDITIONING"
],
[
307,
39,
0,
184,
2,
"VAE"
],
[
308,
184,
0,
149,
1,
"CONDITIONING"
],
[
309,
184,
1,
149,
2,
"CONDITIONING"
],
[
311,
184,
1,
147,
0,
"CONDITIONING"
],
[
312,
184,
2,
149,
3,
"LATENT"
],
[
313,
186,
0,
187,
0,
"CLIP_VISION"
],
[
314,
187,
0,
184,
3,
"CLIP_VISION_OUTPUT"
],
[
316,
111,
0,
188,
0,
"MODEL"
],
[
317,
188,
0,
109,
0,
"MODEL"
],
[
324,
168,
0,
189,
1,
"IMAGE"
],
[
333,
172,
0,
190,
0,
"IMAGE"
],
[
334,
190,
0,
189,
0,
"IMAGE"
],
[
335,
189,
0,
159,
0,
"IMAGE"
],
[
336,
189,
0,
184,
4,
"IMAGE"
],
[
337,
189,
0,
187,
1,
"IMAGE"
],
[
338,
184,
2,
191,
1,
"LATENT"
],
[
340,
191,
0,
48,
0,
"MODEL"
],
[
342,
109,
0,
191,
0,
"MODEL"
],
[
343,
48,
0,
182,
0,
"MODEL"
],
[
344,
182,
0,
149,
0,
"MODEL"
]
],
"groups": [
{
"id": 1,
"title": "Load models here",
"bounding": [
-288.1507568359375,
-34.62283706665039,
410,
620
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Prompt",
"bounding": [
140,
120,
450,
470
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Sampling & Decoding",
"bounding": [
610,
120,
730,
1170
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Video(Mp4)",
"bounding": [
1360,
120,
730,
1170
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Save Video(WebP)",
"bounding": [
2100,
120,
690,
1170
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Load reference images",
"bounding": [
-290,
610,
670,
820
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 9,
"title": "Low Vram high ram",
"bounding": [
-643.607666015625,
-288.4152526855469,
347.9408874511719,
240.64456176757812
],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 10,
"title": "Normal GGUF loader",
"bounding": [
-644.4185180664062,
-527.482177734375,
348.7563781738281,
234.12091064453125
],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 11,
"title": "SageAttention",
"bounding": [
-280,
-170,
273.95635986328125,
123.33882141113281
],
"color": "#444",
"font_size": 24,
"flags": {}
},
{
"id": 12,
"title": "FP16 Accumulation",
"bounding": [
0,
-170,
292.22314453125,
127.21359252929688
],
"color": "#444",
"font_size": 24,
"flags": {}
},
{
"id": 13,
"title": "Safetensors",
"bounding": [
-640,
-770,
340,
230
],
"color": "#a1309b",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 1.116781577942499,
"offset": [
-1434.5706626492988,
99.66723851095415
]
},
"frontendVersion": "1.21.7",
"node_versions": {
"comfy-core": "0.3.34"
},
"VHS_latentpreview": true,
"VHS_latentpreviewrate": 0,
"VHS_MetadataImage": false,
"VHS_KeepIntermediate": true,
"ue_links": [],
"links_added_by_ue": []
},
"version": 0.4
}