"""Package init for the RECAST8b LLaMA model.

When torch is available, imports the custom configuration/model classes and
registers them with the transformers ``Auto*`` factories so that
``AutoModel.from_pretrained(...)`` can resolve the ``"recast8b_llama"``
model type. When torch is missing, the model classes are silently skipped
(standard transformers optional-dependency pattern).
"""
from transformers.utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,  # NOTE(review): imported but unused here — presumably intended for lazy loading via _import_structure; confirm before removing
    is_torch_available,
)

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # torch is not installed: deliberately skip the model imports and
    # Auto-class registration. Names listed in __all__ will simply be
    # absent from the package namespace in that case.
    pass
else:
    from .modeling_recast_llama import (
        RECAST8b_llamaModel,
        RECAST8b_LlamaForCausalLM,
    )
    from .configuration_recast_llama import RECAST8b_llama

    from transformers import AutoConfig, AutoModel, AutoModelForCausalLM

    # Register the custom classes with the Auto factories so the
    # "recast8b_llama" model_type resolves to these implementations.
    AutoConfig.register("recast8b_llama", RECAST8b_llama)
    AutoModel.register(RECAST8b_llama, RECAST8b_llamaModel)
    AutoModelForCausalLM.register(RECAST8b_llama, RECAST8b_LlamaForCausalLM)

# FIX: the keys previously read "configuration_recastmlp_llama" /
# "modeling_recastmlp_llama", which do not match the actual module names
# imported above (configuration_recast_llama / modeling_recast_llama).
# With the wrong keys, any consumer of this mapping (e.g. _LazyModule)
# would raise ModuleNotFoundError on attribute access.
_import_structure = {
    "configuration_recast_llama": ["RECAST8b_llama"],
    "modeling_recast_llama": ["RECAST8b_llamaModel", "RECAST8b_LlamaForCausalLM"],
}

__all__ = ["RECAST8b_llamaModel", "RECAST8b_LlamaForCausalLM", "RECAST8b_llama"]