# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""isort:skip_file"""

from .adaptive_input import AdaptiveInput
from .adaptive_softmax import AdaptiveSoftmax
from .base_layer import BaseLayer
from .beamable_mm import BeamableMM
from .character_token_embedder import CharacterTokenEmbedder
from .conv_tbc import ConvTBC
from .cross_entropy import cross_entropy
from .downsampled_multihead_attention import DownsampledMultiHeadAttention
from .dynamic_convolution import DynamicConv, DynamicConv1dTBC, DynamicConv_scripatable
from .dynamic_crf_layer import DynamicCRF
from .ema_module import EMAModuleConfig, EMAModule
from .fairseq_dropout import FairseqDropout
from .fp32_batch_norm import Fp32BatchNorm
from .fp32_group_norm import Fp32GroupNorm
from .fp32_instance_norm import Fp32InstanceNorm
from .gelu import gelu, gelu_accurate
from .grad_multiply import GradMultiply
from .gumbel_vector_quantizer import GumbelVectorQuantizer
from .kmeans_vector_quantizer import KmeansVectorQuantizer
from .layer_drop import LayerDropModuleList
from .layer_norm import Fp32LayerNorm, LayerNorm
from .learned_positional_embedding import LearnedPositionalEmbedding
from .lightweight_convolution import LightweightConv, LightweightConv1dTBC
from .linearized_convolution import LinearizedConvolution
from .location_attention import LocationAttention
from .lstm_cell_with_zoneout import LSTMCellWithZoneOut
from .multihead_attention import MultiheadAttention
from .positional_embedding import PositionalEmbedding
from .same_pad import SamePad, SamePad2d
from .scalar_bias import ScalarBias
from .sinusoidal_positional_embedding import SinusoidalPositionalEmbedding
from .transformer_sentence_encoder_layer import TransformerSentenceEncoderLayer
from .transformer_sentence_encoder import TransformerSentenceEncoder
from .transpose_last import TransposeLast
from .unfold import unfold1d
from .transformer_layer import TransformerDecoderLayer, TransformerEncoderLayer
from .vggblock import VGGBlock
from .espnet_multihead_attention import (
    ESPNETMultiHeadedAttention,
    RelPositionMultiHeadedAttention,
    RotaryPositionMultiHeadedAttention,
)
from .rotary_positional_embedding import RotaryPositionalEmbedding
from .positional_encoding import RelPositionalEncoding

__all__ = [
    "AdaptiveInput",
    "AdaptiveSoftmax",
    "BaseLayer",
    "BeamableMM",
    "CharacterTokenEmbedder",
    "ConvTBC",
    "cross_entropy",
    "DownsampledMultiHeadAttention",
    "DynamicConv1dTBC",
    "DynamicConv",
    "DynamicConv_scripatable",
    "DynamicCRF",
    "EMAModule",
    "EMAModuleConfig",
    "FairseqDropout",
    "Fp32BatchNorm",
    "Fp32GroupNorm",
    "Fp32LayerNorm",
    "Fp32InstanceNorm",
    "gelu",
    "gelu_accurate",
    "GradMultiply",
    "GumbelVectorQuantizer",
    "KmeansVectorQuantizer",
    "LayerDropModuleList",
    "LayerNorm",
    "LearnedPositionalEmbedding",
    "LightweightConv1dTBC",
    "LightweightConv",
    "LinearizedConvolution",
    "LocationAttention",
    "LSTMCellWithZoneOut",
    "MultiheadAttention",
    "PositionalEmbedding",
    "SamePad",
    "SamePad2d",
    "ScalarBias",
    "SinusoidalPositionalEmbedding",
    "TransformerSentenceEncoderLayer",
    "TransformerSentenceEncoder",
    "TransformerDecoderLayer",
    "TransformerEncoderLayer",
    "TransposeLast",
    "VGGBlock",
    "unfold1d",
| "ESPNETMultiheadedAttention", | |
| "PositionalEmbedding", | |
| "RelPositionMultiHeadedAttention", | |
| "RelPositionalEncoding", | |
| "RotaryPositionalEmbedding", | |
| "RotaryPositionMultiHeadedAttention", | |
| ] | |
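
# A minimal usage sketch, assuming fairseq is installed and this package is
# importable as `fairseq.modules` (kept as comments so this __init__ stays
# side-effect free). The re-exports above let callers skip the submodule paths:
#
#     from fairseq.modules import LayerNorm, MultiheadAttention
#
#     norm = LayerNorm(512)              # factory: fused LayerNorm when available
#     attn = MultiheadAttention(512, 8)  # embed_dim=512, num_heads=8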