from Var import *

# nn: namespace class holding the neural-network layer types
class nn:
    """Namespace for neural-network building blocks (nn.Module, nn.linear, ...)."""

    class Module:
        """Base class for all layers/models.

        Every ``Var`` or ``nn.Module`` assigned as an attribute is recorded in
        ``self.params`` (via ``__setattr__``), so parameters can later be
        collected for gradient descent or serialization.
        """

        def __init__(self):
            # name -> Var | nn.Module registry, filled automatically by __setattr__
            self.params = {}

        def __setattr__(self, key, value):
            # Auto-register trainable tensors and sub-modules.
            if isinstance(value, (Var, nn.Module)):
                # Robustness: create the registry lazily so a subclass that
                # assigns a Var *before* calling super().__init__() still works.
                if 'params' not in self.__dict__:
                    super().__setattr__('params', {})
                self.params[key] = value
            super().__setattr__(key, value)

        def __repr__(self):
            return str(self.params)

        def __getitem__(self, key):
            return self.params[key]

        def forward(self, x):
            # Subclasses implement the actual computation; base class is a no-op.
            pass

        def __call__(self, x):
            # Allow model(x) as shorthand for model.forward(x).
            return self.forward(x)

        # --- helpers used for saving models and for gradient descent ---

        def get_params(self, save=False):
            """Collect trainable Vars from this module and all sub-modules.

            Args:
                save: when True, return ``str(Var)`` snapshots (for saving)
                      instead of the live Var objects.

            Returns:
                list of Var (or str when ``save`` is True), in registration order.
            """
            params = []
            for value in self.params.values():
                if isinstance(value, Var):
                    params.append(str(value) if save else value)
                elif isinstance(value, nn.Module):
                    # Recurse so arbitrarily nested modules are covered
                    # (the previous version only looked one level deep and
                    # silently dropped deeper parameters).
                    params.extend(value.get_params(save))
            return params

        def get_modules(self, save=False):
            """Return the directly-nested sub-modules keyed by attribute name.

            Args:
                save: when True, store ``str(module)`` (its repr, used by the
                      save format) instead of the live module object.
            """
            modules = {}
            for key, value in self.params.items():
                if isinstance(value, nn.Module):
                    modules[key] = str(value) if save else value
            return modules

        def set_params(self, params):
            # Overridden by layers that own parameters (linear, conv2d).
            pass

    class Flatten(Module):
        """Flattens (N, *dims) input to (N, prod(dims)), keeping the batch axis."""

        def __init__(self):
            super().__init__()

        def forward(self, x):
            # Keep the batch dimension, collapse everything else.
            return x.reshape(x.data.shape[0], -1)

    class linear(Module):
        """Fully-connected layer: y = x @ w (+ b).

        Weights use Kaiming/He initialization (scale sqrt(2/in_features)),
        appropriate for ReLU networks; the bias starts at zero.
        """

        def __init__(self, in_features, out_features, bias=True):
            super().__init__()
            self.in_features = in_features
            self.out_features = out_features
            self.w = Var(np.random.randn(in_features, out_features) * np.sqrt(2 / in_features), require_grad=True)
            self.bias = bias
            if bias:
                # (1, out_features) broadcasts over the batch dimension.
                self.b = Var(np.zeros((1, out_features)), require_grad=True)
            else:
                self.b = None

        def forward(self, x):
            out = x @ self.w
            if self.b is not None:
                # Rebind instead of `+=` so a potential in-place Var op cannot
                # corrupt the autograd graph.
                out = out + self.b
            return out

        def __repr__(self):
            return f"nn.linear({self.in_features},{self.out_features},bias={self.bias})"

        def __call__(self, x):
            return self.forward(x)

        def set_params(self, params):
            # params: [w] or [w, b], in the order produced by Module.get_params.
            self.w = params[0]
            if self.bias is True:
                self.b = params[1]

    class conv2d(Module):
        """2D convolution layer with Kaiming-initialized kernels."""

        def __init__(self, kernel_h, kernel_w, out_channels=1, in_channels=1, stride=1, padding=0, bias=True):
            super().__init__()
            fan_in = in_channels * kernel_h * kernel_w  # input size of one kernel
            scale = np.sqrt(2 / fan_in)  # Kaiming scaling
            self.kernel_h = kernel_h
            self.kernel_w = kernel_w
            self.w = Var(
                np.random.randn(out_channels, in_channels, kernel_h, kernel_w) * scale,
                require_grad=True
            )
            self.bias = bias
            if bias:
                # (1, C, 1, 1) broadcasts over batch and spatial dims of (N, C, H, W).
                self.b = Var(np.zeros((1, out_channels, 1, 1)), require_grad=True)
            else:
                self.b = None
            self.in_channels = in_channels
            self.out_channels = out_channels
            self.stride = stride
            self.padding = padding

        def forward(self, x):
            out = self.w.conv2d(x, self.out_channels, self.in_channels)
            # BUG FIX: the bias parameter was created (and restored by
            # set_params) but never applied in the forward pass.
            if self.b is not None:
                out = out + self.b
            # NOTE(review): self.stride / self.padding are stored but not
            # forwarded to Var.conv2d — confirm whether Var.conv2d accepts
            # them; TODO wire them through if so.
            return out

        def __repr__(self):
            return f"nn.conv2d({self.kernel_h}, {self.kernel_w} , out_channels = {self.out_channels},in_channels = {self.in_channels}, stride={self.stride}, padding={self.padding}, bias={self.bias})"

        def __call__(self, x):
            return self.forward(x)

        def set_params(self, params):
            # params: [w] or [w, b], in the order produced by Module.get_params.
            self.w = params[0]
            if self.bias is True:
                self.b = params[1]

    class ReLU(Module):
        """Element-wise rectified linear unit: max(x, 0)."""

        def __init__(self):
            super().__init__()

        def forward(self, x):
            return x.relu()

        def __repr__(self):
            return f"nn.ReLU()"

        def __call__(self, x):
            return self.forward(x)

    class Pool(Module):
        """2D pooling layer; mode='max' uses max pooling, anything else average."""

        def __init__(self, kernel_h, kernel_w, stride=1, padding=0, mode='max'):
            super().__init__()
            self.kernel_h = kernel_h
            self.kernel_w = kernel_w
            self.stride = stride
            self.padding = padding
            self.mode = mode

        def forward(self, x):
            if self.mode == 'max':
                out = x.max_pool2d(self.kernel_h, self.kernel_w, stride=self.stride, padding=self.padding)
            else:
                out = x.avg_pool2d(self.kernel_h, self.kernel_w, stride=self.stride, padding=self.padding)
            return out

        def __repr__(self):
            return f"nn.Pool({self.kernel_h}, {self.kernel_w} , stride={self.stride}, padding={self.padding},mode={self.mode})"

        def __call__(self, x):
            return self.forward(x)
