# Source: Hugging Face Hub upload by appledora ("Upload 6 files",
# commit 7a1d06b, verified; raw file, 988 bytes). Page chrome from the
# Hub file viewer converted to this comment so the module stays valid Python.
from transformers.utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_torch_available,
)
try:
    # The concrete model classes below require torch; raising the
    # transformers-internal OptionalDependencyNotAvailable sentinel routes
    # control to the except branch when it is absent.
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # NOTE(review): when torch is missing this silently defines nothing, so
    # every name in __all__ is absent and downstream imports will fail with
    # an unrelated-looking ImportError. _LazyModule is imported at the top of
    # this file but never used — a lazy-loading fallback appears to have been
    # intended here and left unwired; confirm against the sibling files of
    # this upload.
    pass
else:
    # Torch is available: eagerly import the model and config classes.
    from .modeling_recastmlp_llama import (
        RECASTMLP_llamaModel,
        RECASTMLP_LlamaForCausalLM,
    )
    from .configuration_recastmlp_llama import RECASTMLP_llama
    from transformers import AutoConfig, AutoModel, AutoModelForCausalLM

    # Register your models with Auto classes so that
    # AutoModel.from_pretrained(...) can resolve the "recastmlp_llama"
    # model_type to these classes.
    AutoConfig.register("recastmlp_llama", RECASTMLP_llama)
    AutoModel.register(RECASTMLP_llama, RECASTMLP_llamaModel)
    AutoModelForCausalLM.register(RECASTMLP_llama, RECASTMLP_LlamaForCausalLM)
# Import map in the {submodule: [exported names]} layout that transformers'
# _LazyModule expects.
# NOTE(review): _import_structure is built here (and _LazyModule imported at
# the top of the file) but, in the portion of the file visible here, it is
# never passed to _LazyModule — the lazy-import path looks unwired; confirm
# whether a sys.modules[__name__] = _LazyModule(...) assignment was intended.
_import_structure = {
    "configuration_recastmlp_llama": ["RECASTMLP_llama"],
    "modeling_recastmlp_llama": ["RECASTMLP_llamaModel", "RECASTMLP_LlamaForCausalLM"],
}

# Public API of this package: the two torch model classes and the config class.
# These names only exist when torch is available (see the try/except above).
__all__ = ["RECASTMLP_llamaModel", "RECASTMLP_LlamaForCausalLM", "RECASTMLP_llama"]