|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import TYPE_CHECKING |
|
|
|
from transformers.file_utils import _LazyModule, is_torch_available |
|
|
|
|
|
# Map of submodule name -> public symbols it provides. Consumed by
# `_LazyModule` below so that submodules are only imported on first access.
# These two entries are always available (no optional backend required).
_import_structure = {}
_import_structure["configuration_longformer"] = ["LongformerConfig"]
_import_structure["tokenization_longformer"] = ["LongformerTokenizer"]
|
|
|
# The modeling submodule depends on PyTorch, so it is only registered for
# lazy import when torch is installed.
if is_torch_available():
    _import_structure.update(
        modeling_longformer=[
            "LongformerModel",
            "LongformerForMaskedLM",
            "LongformerForMultipleChoice",
            "LongformerPreTrainedModel",
            "LongformerForQuestionAnswering",
            "LongformerForSequenceClassification",
            "LongformerForTokenClassification",
        ]
    )
|
|
|
|
|
if TYPE_CHECKING:
    # Static type checkers (and IDEs) see real, eager imports; this branch is
    # never executed at runtime. Names are listed alphabetically.
    from .configuration_longformer import LongformerConfig
    from .tokenization_longformer import LongformerTokenizer

    if is_torch_available():
        from .modeling_longformer import (
            LongformerForMaskedLM,
            LongformerForMultipleChoice,
            LongformerForQuestionAnswering,
            LongformerForSequenceClassification,
            LongformerForTokenClassification,
            LongformerModel,
            LongformerPreTrainedModel,
        )
else:
    import sys

    # At runtime, replace this module object with a lazy proxy: submodules in
    # `_import_structure` are imported only when one of their attributes is
    # first accessed.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)
|
|