Yukang committed on
Commit
d55fec2
·
verified ·
1 Parent(s): 26a9012

Upload siglip_encoder.py

Browse files
Files changed (1) hide show
  1. siglip_encoder.py +2 -2
siglip_encoder.py CHANGED
@@ -253,7 +253,7 @@ class SiglipVisionTower(VisionTower):
253
  # TODO(ligengl): why pass config here leading to errors?
254
  self.vision_tower = SiglipVisionModel.from_pretrained(
255
  model_name_or_path,
256
- attn_implementation=config._attn_implementation,
257
  torch_dtype=eval(config.model_dtype),
258
  )
259
  self.image_processor = SiglipImageProcessor.from_pretrained(model_name_or_path)
@@ -265,7 +265,7 @@ class SiglipVisionTowerS2(VisionTowerS2):
265
  super().__init__(model_name_or_path, config)
266
  self.vision_tower = SiglipVisionModel.from_pretrained(
267
  model_name_or_path,
268
- attn_implementation=config._attn_implementation,
269
  torch_dtype=eval(config.model_dtype),
270
  )
271
  self.image_processor = SiglipImageProcessor.from_pretrained(model_name_or_path)
 
253
  # TODO(ligengl): why pass config here leading to errors?
254
  self.vision_tower = SiglipVisionModel.from_pretrained(
255
  model_name_or_path,
256
+ attn_implementation="flash_attention_2", #config._attn_implementation,
257
  torch_dtype=eval(config.model_dtype),
258
  )
259
  self.image_processor = SiglipImageProcessor.from_pretrained(model_name_or_path)
 
265
  super().__init__(model_name_or_path, config)
266
  self.vision_tower = SiglipVisionModel.from_pretrained(
267
  model_name_or_path,
268
+ attn_implementation="flash_attention_2", #config._attn_implementation,
269
  torch_dtype=eval(config.model_dtype),
270
  )
271
  self.image_processor = SiglipImageProcessor.from_pretrained(model_name_or_path)