# -*- coding: utf-8 -*-

# File Name: base_module
# Description: Base nn.Module subclass providing shared weight initialization.
# Author: lirui
# Create Date: 2022/6/3
# Change Activity:
from torch import nn
from torch.nn.init import trunc_normal_


class BaseModule(nn.Module):
    """Base class for PyTorch models in this project.

    Subclasses inherit :meth:`init_weights`, which applies a standard
    initialization scheme (truncated-normal Linear weights, zeroed biases,
    unit LayerNorm scale) to every submodule.
    """

    def __init__(self):
        super().__init__()

    def init_weights(self):
        """Initialize the weights of all submodules in place.

        Scheme:
            * ``nn.Linear``: weight ~ truncated normal (std=0.02);
              bias (if present) set to 0.
            * ``nn.LayerNorm``: weight set to 1.0, bias set to 0.

        Other module types are left untouched. Uses ``nn.Module.apply``
        so the rule is applied recursively to every submodule.
        """

        def _init_weights(m):
            if isinstance(m, nn.Linear):
                trunc_normal_(m.weight, std=.02)
                # Redundant isinstance re-check removed: m is already
                # known to be nn.Linear in this branch.
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.LayerNorm):
                nn.init.constant_(m.bias, 0)
                nn.init.constant_(m.weight, 1.0)

        self.apply(_init_weights)
