eunhwanpark-motiftech committed
Commit 7d405b9 · verified · 1 Parent(s): 4293a01

Update modeling_motif.py

Files changed (1)
  1. modeling_motif.py +0 -9
modeling_motif.py CHANGED
@@ -61,17 +61,8 @@ logger = logging.get_logger(__name__)
 if is_flash_attn_2_available():
     from transformers.modeling_flash_attention_utils import _flash_attention_forward
 
-try:
-    moreh_ops = torch.ops.moreh
-    logger.warning_once("Using moreh ops")
-except AttributeError:
-    logger.warning_once("Failed to import moreh ops")
-
-#_CHECKPOINT_FOR_DOC = "moreh/Motif-102B"
 _CONFIG_FOR_DOC = "MotifConfig"
 
-#from .moreh_moe import MorehMoeMLP, MorehMoeFusedMLP
-
 
 class MotifRMSNorm(nn.Module):
 