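# RoFormer subpackage initialisation: registers the configuration, tokenizer,
# and (when torch is available) model classes for lazy import.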
from typing import TYPE_CHECKING

from transformers.file_utils import _LazyModule, is_torch_available

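# Public objects exposed by each RoFormer submodule; _LazyModule uses this map
# to defer the actual imports until an attribute is first requested.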
_import_structure = {
    "configuration_roformer": ["RoFormerConfig"],
    "tokenization_roformer": ["RoFormerTokenizer"],
}

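# The PyTorch model classes are only advertised when torch is installed.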
if is_torch_available():
    _import_structure["modeling_roformer"] = [
        "RoFormerModel",
        "RoFormerForMaskedLM",
        "RoFormerForMultipleChoice",
        "RoFormerPreTrainedModel",
        "RoFormerForQuestionAnswering",
        "RoFormerForSequenceClassification",
        "RoFormerForTokenClassification",
    ]

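# During static type checking the real symbols are imported eagerly so that
# analyzers can resolve them; at runtime the module is swapped for a
# _LazyModule that performs the imports on first attribute access.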
if TYPE_CHECKING:
    from .configuration_roformer import RoFormerConfig
    from .tokenization_roformer import RoFormerTokenizer

    if is_torch_available():
        from .modeling_roformer import (
            RoFormerModel,
            RoFormerForMaskedLM,
            RoFormerForMultipleChoice,
            RoFormerPreTrainedModel,
            RoFormerForQuestionAnswering,
            RoFormerForSequenceClassification,
            RoFormerForTokenClassification,
        )

else:
    import sys

    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)