import torch
import torch.nn as nn

class Normalize(nn.Module):
    """Normalize a tensor along the given dimension(s).

    The strategy is selected once at construction time from ``p``:

    * ``p >= 2`` — divide by the Lp norm along ``dim`` (``lp``).
    * ``p < 2``  — divide by the per-``dim`` absolute maximum (``l1``).

    Args:
        p (int): norm order; also selects the normalization strategy.
        eps (float): small constant added to the denominator to avoid
            division by zero.
    """

    def __init__(self, p=2, eps=1e-12):
        super().__init__()
        self.p = p
        self.eps = eps
        # Dispatch is bound once here, based on p, rather than per call.
        self.norm = self.l1 if self.p < 2 else self.lp

    def l1(self, x, dim):
        # NOTE(review): despite the name, this is max-abs (L-inf style)
        # normalization, not an L1 (sum of absolute values) norm. Kept
        # as-is because existing callers may depend on this behavior.
        down = torch.abs(x)
        if isinstance(dim, int):
            down, _ = torch.max(down, dim=dim, keepdim=True)
        else:
            # Reduce one dimension at a time; keepdim=True keeps the
            # result broadcastable against x.
            for i in dim:
                down, _ = torch.max(down, dim=i, keepdim=True)
        return x / (down + self.eps)

    def lp(self, x, dim):
        # FIX: take |x| before raising to the p-th power. With the old
        # torch.pow(x, self.p), an odd p and negative entries could make
        # the reduced sum negative, and pow(negative, 1/p) yields NaN.
        # For even p (including the default p=2) this is a no-op.
        down = torch.abs(x).pow(self.p)
        down = torch.sum(down, dim=dim, keepdim=True)
        down = torch.pow(down, 1.0 / self.p)
        return x / (down + self.eps)

    def forward(self, x, dim):
        """Return ``x`` normalized along ``dim`` (an int; the max-norm
        path also accepts an iterable of ints)."""
        return self.norm(x, dim)


class MaxMinNorm(nn.Module):
    """Rescale each (batch, channel) spatial slice of a 4-D tensor to [0, 1].

    Per (B, C) slice the mapping is ``(x - min) / (max - min + eps)``,
    with min/max taken over all H*W spatial positions of that slice.

    Args:
        eps (float): small constant guarding against division by zero
            when a slice is constant (max == min).
    """

    def __init__(self, eps=1e-12):
        super().__init__()
        self.eps = eps

    def forward(self, x):
        # FIX: the original read `B, C, H, W = x.shape[:2]`, unpacking
        # four names from a 2-element slice — a ValueError on every call.
        B, C, H, W = x.shape

        # Flatten spatial dims so min/max reduce over all H*W positions.
        x = x.view(B, C, -1)
        xmax, _ = x.max(dim=2, keepdim=True)
        xmin, _ = x.min(dim=2, keepdim=True)
        x = (x - xmin) / (xmax - xmin + self.eps)

        return x.view(B, C, H, W)