EndlessSora committed on
Commit fbe15f0 · 1 Parent(s): d939c9c
pipelines/pipeline_flux_infusenet.py CHANGED
@@ -301,7 +301,8 @@ class FluxInfuseNetPipeline(FluxControlNetPipeline):
         else:
             batch_size = prompt_embeds.shape[0]
 
-        device = self._execution_device
+        # device = self._execution_device
+        device = 'cuda'
         dtype = self.transformer.dtype
 
         # CPU offload controlnet, move back T5 to GPU
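For context (my reading of the change, not stated in the commit): `_execution_device` in diffusers infers the device from the pipeline's modules and offload hooks, so once some submodules are manually parked on the CPU it can resolve to 'cpu'; hard-coding 'cuda' keeps the denoising path on the GPU. A minimal sketch of that manual-placement pattern, using illustrative stand-in modules rather than the repo's classes:

import torch
import torch.nn as nn

# Hypothetical stand-ins for the pipeline's submodules (not the repo's classes).
controlnet = nn.Linear(16, 16)
transformer = nn.Linear(16, 16)

# Manual offload: the rarely-used module stays on CPU, the hot path goes to GPU.
controlnet.cpu()
device = 'cuda' if torch.cuda.is_available() else 'cpu'
transformer.to(device)
if torch.cuda.is_available():
    torch.cuda.empty_cache()  # release cached blocks freed by the moves

# With modules split across devices, inferring the device from whichever
# parameter is found first can pick 'cpu'; pinning it explicitly keeps new
# tensors (latents, embeddings) on the GPU path.
latents = torch.randn(1, 16, device=device)
out = transformer(latents)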
pipelines/pipeline_infu_flux.py CHANGED
@@ -180,6 +180,9 @@ class InfUFluxPipeline:
         pipe.controlnet.cpu()
         pipe.text_encoder_2.cpu()
         torch.cuda.empty_cache()
+        pipe.text_encoder.to('cuda')
+        pipe.transformer.to('cuda')
+        pipe.vae.to('cuda')
         # pipe.enable_model_cpu_offload()
         self.pipe = pipe
 
@@ -219,7 +222,7 @@ class InfUFluxPipeline:
         image_proj_model.load_state_dict(ipm_state_dict['image_proj'])
         del ipm_state_dict
         self.image_proj_model_sim = image_proj_model
-        self.image_proj_model_sim.to('cpu', torch.bfloat16)
+        self.image_proj_model_sim.to('cpu', dtype=torch.bfloat16)
         self.image_proj_model_sim.eval()
 
         self.image_proj_model = self.image_proj_model_aes
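A small sketch of the two idioms touched in this file, passing the dtype as an explicit keyword to Module.to and moving modules back to the GPU before inference; the module name below is a placeholder, not the repo's:

import torch
import torch.nn as nn

proj = nn.Linear(16, 16)

# Explicit keyword form: park the module on CPU and cast it to bfloat16.
proj.to('cpu', dtype=torch.bfloat16)
proj.eval()

# Later, move it back to the GPU before running inference, mirroring the
# text_encoder / transformer / vae moves in this commit.
if torch.cuda.is_available():
    proj.to('cuda')
    with torch.no_grad():
        x = torch.randn(1, 16, device='cuda', dtype=torch.bfloat16)
        print(proj(x).dtype)  # torch.bfloat16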