import torch
import numpy as np
from tianshou.utils.net.common import MLP
from torch import nn
from typing import Any, Dict, Tuple, Union, Optional, Sequence, Type

from torchsummary import summary

ModuleType = Type[nn.Module]  # alias: a *class* of nn.Module (not an instance), used for norm/activation layer factories


class CommonCNN(nn.Module):
    """Nature-DQN style convolutional feature extractor.

    Maps a stacked-frame observation of shape ``(N, in_shape, H, W)`` to a
    flat feature tensor ``(N, feature_dim)``.  Designed to be composed as a
    sub-module (e.g. inside ``nn.Sequential``), so :meth:`forward` returns a
    plain tensor.

    Fixes two defects of the previous version: ``forward`` referenced
    ``self.device``, which was never assigned (AttributeError on every call),
    and it returned a ``(tensor, state)`` tuple, which broke the in-file
    callers that read ``.shape`` on the result and wrap this module in
    ``nn.Sequential``.
    """

    def __init__(self, in_shape: int = 4) -> None:
        """:param in_shape: number of input channels (stacked frames)."""
        super().__init__()
        self.net = nn.Sequential(
            nn.Conv2d(in_shape, 32, kernel_size=8, stride=4), nn.ReLU(inplace=True),
            nn.Conv2d(32, 64, kernel_size=4, stride=2), nn.ReLU(inplace=True),
            nn.Conv2d(64, 64, kernel_size=3, stride=1), nn.ReLU(inplace=True),
            nn.Flatten()
        )

    def forward(
            self,
            x: Union[np.ndarray, torch.Tensor],
            state: Optional[Any] = None,
            info: Optional[Dict[str, Any]] = None,
    ) -> torch.Tensor:
        """Extract features from ``x``.

        ``state`` and ``info`` are accepted for API compatibility with other
        tianshou-style nets but are ignored.

        :return: flat feature tensor of shape ``(N, feature_dim)``.
        """
        # Device placement is left to the caller: CommonNet already moves
        # inputs to its own device before invoking this module.
        x = torch.as_tensor(x, dtype=torch.float32)
        return self.net(x)


class CommonNet(nn.Module):
    """General-purpose network: CNN backbone + MLP head, or a pure MLP.

    :param state_shape: observation shape; when ``linear_only`` is False the
        first entry is treated as the channel count of :class:`CommonCNN`.
    :param action_shape: action shape; when ``None``, the last entry of
        ``linear_sizes`` is used as the output dimension instead.
    :param device: device the input is moved to in :meth:`forward`.
    :param linear_sizes: hidden sizes of the MLP head.
    :param norm_layer: optional normalization layer class for the MLP.
    :param activation: activation layer class(es) for the MLP.
    :param softmax: if True, apply softmax over the last output dimension.
    :param linear_only: if True, skip the CNN and use a flatten + MLP only.
    """

    def __init__(
            self,
            state_shape: Sequence[int],
            action_shape: Optional[Sequence[int]] = None,
            device: Union[str, int, torch.device] = "cpu",
            linear_sizes: Sequence[int] = (512,),
            norm_layer: Optional[ModuleType] = None,
            activation: Optional[Union[ModuleType, Sequence[ModuleType]]] = nn.ReLU,
            softmax: bool = False,
            linear_only: bool = False,
    ) -> None:
        super().__init__()
        # Validate with real exceptions: ``assert`` is stripped under -O.
        if linear_only and len(linear_sizes) == 0:
            raise ValueError("linear_only=True requires a non-empty linear_sizes")
        if len(state_shape) < 1:
            raise ValueError("state_shape must have at least one dimension")

        self.device = device
        self.softmax = softmax
        if action_shape is None:
            # No action space given: the last linear size becomes the output
            # dimension and is removed from the hidden sizes.
            self.action_dim = int(linear_sizes[-1])
            linear_sizes = linear_sizes[:-1]
        else:
            # int() keeps this a plain Python int (np.prod yields np.int64).
            self.action_dim = int(np.prod(action_shape))

        if linear_only:
            mlp = MLP(int(np.prod(state_shape)), self.action_dim, linear_sizes,
                      norm_layer, activation, device)
            if len(state_shape) > 1:
                # Multi-dim observations must be flattened first; use the bare
                # nn.Sequential (``mlp.model``) so Flatten can be prepended.
                self.net = nn.Sequential(nn.Flatten(), mlp.model)
            else:
                self.net = mlp
            # nn.Sequential has no output_dim attribute; fall back to action_dim.
            self.output_dim = getattr(self.net, "output_dim", self.action_dim)
        else:
            self.net = CommonCNN(state_shape[0])
            # Probe with a dummy observation to discover the CNN feature size.
            with torch.no_grad():
                probe = self.net(torch.zeros(1, *state_shape))
                if isinstance(probe, tuple):
                    # Tolerate feature nets that return (features, state).
                    probe = probe[0]
                self.output_dim = int(np.prod(probe.shape[1:]))

            if len(linear_sizes) > 0:
                self.net = nn.Sequential(
                    self.net,
                    MLP(self.output_dim, self.action_dim, linear_sizes,
                        norm_layer, activation, device).model,
                )
                self.output_dim = self.action_dim

    def forward(
            self,
            x: Union[np.ndarray, torch.Tensor],
            state: Optional[Any] = None,
            info: Optional[Dict[str, Any]] = None,
    ) -> Tuple[torch.Tensor, Any]:
        """Map observation -> output (softmax-normalized if configured).

        :return: ``(output, state)``; ``state`` is passed through untouched.
        """
        x = torch.as_tensor(x, device=self.device, dtype=torch.float32)
        x = self.net(x)
        if self.softmax:
            x = torch.softmax(x, dim=-1)
        return x, state


class DuelingNet(nn.Module):
    """Dueling DQN head combining a shared feature net with separate
    advantage (A) and value (V) streams.

    :param net: shared feature extractor; its ``forward(x, state, info)``
        must return ``(features, state)``.
    :param A: advantage stream mapping features to per-action advantages;
        must expose an ``output_dim`` attribute.
    :param V: value stream mapping features to a state value.
    """

    def __init__(
            self,
            net: nn.Module,
            A: nn.Module,
            V: nn.Module,
    ) -> None:
        super().__init__()
        self.net, self.A, self.V = net, A, V
        # The advantage stream defines the number of outputs (actions).
        self.output_dim = self.A.output_dim

    def forward(
            self,
            x: Union[np.ndarray, torch.Tensor],
            state: Optional[Any] = None,
            info: Optional[Dict[str, Any]] = None,
    ) -> Tuple[torch.Tensor, Any]:
        r"""Mapping: x -> V(x, \*) + A(s, \*) - mean(A).

        Subtracting the mean advantage makes the V/A decomposition
        identifiable (the standard dueling-network aggregation).
        """
        if info is None:
            # Avoid a shared mutable default argument; downstream nets may
            # expect a dict, so normalize None back to {}.
            info = {}
        x, state = self.net.forward(x, state, info)
        a, v = self.A(x), self.V(x)
        # NOTE(review): dim=1 assumes (batch, n_actions) advantage outputs —
        # confirm before using with higher-rank streams.
        x = a - a.mean(dim=1, keepdim=True) + v
        return x, state

    def print_model(self, input_size, batch_size=-1, device="cuda"):
        """Print torchsummary tables for the feature, advantage and value nets.

        :param input_size: input size of the feature net (excluding batch).
        :param batch_size: batch size shown in the summary (-1 = dynamic).
        :param device: device torchsummary runs the dummy forward pass on.
        """
        # NOTE(review): torchsummary.summary prints its own table and may
        # return None, in which case print() emits a stray "None" line —
        # confirm which summary package variant is installed.
        print(summary(self.net,
                      input_size=input_size,
                      batch_size=batch_size,
                      device=device)
              )
        print('=========== Advantage Net ===========')
        print(summary(self.A,
                      input_size=(self.net.output_dim,),
                      batch_size=batch_size,
                      device=device)
              )
        print('=========== Value Net ===========')
        print(summary(self.V,
                      input_size=(self.net.output_dim,),
                      batch_size=batch_size,
                      device=device)
              )
