Update modeling_motif.py
modeling_motif.py  CHANGED  +0 -2
@@ -905,8 +905,6 @@ class MotifDecoderLayer(nn.Module):
     def __init__(self, config: MotifConfig, layer_idx: int):
         super().__init__()
         self.hidden_size = config.hidden_size
-        if config.use_moreh_attention:
-            config._attn_implementation = "flash_attention_2"
         if config.sliding_window and config._attn_implementation != "flash_attention_2":
             logger.warning_once(
                 f"Sliding Window Attention is enabled but not implemented for `{config._attn_implementation}`; "
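The removed lines silently forced `config._attn_implementation` to "flash_attention_2" whenever `config.use_moreh_attention` was set. With that override gone, the attention backend is whatever the caller selects at load time, and the sliding-window warning above fires if it is not FlashAttention 2. A minimal usage sketch follows; the repository id is a placeholder, and it assumes the standard `transformers` remote-code loading path since modeling_motif.py ships with the checkpoint:

from transformers import AutoModelForCausalLM

# Placeholder repository id; substitute the actual Motif checkpoint path.
repo_id = "org/motif-checkpoint"

# After this change, request FlashAttention 2 explicitly instead of relying
# on `use_moreh_attention` to switch the implementation for you.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    attn_implementation="flash_attention_2",
    trust_remote_code=True,  # modeling_motif.py is custom code in the repo
)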