import math
from typing import (
    Any,
    Callable,
    Iterable,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Union,
)

import torch
import torch.nn.functional as F
from torch import nn


class DenseGeneral(nn.Module):
    """General dense (affine) layer that contracts arbitrary input axes.

    Computes ``out = inputs · kernel (+ bias)``, contracting the input
    axes named by ``axis`` against the kernel's leading axes and mapping
    them onto ``features``.  The output shape is the non-contracted input
    dims followed by ``features``.

    Because the kernel shape depends on the input shape, parameters are
    created lazily on the first forward pass and reused thereafter (note:
    they therefore only appear in ``parameters()``/``state_dict()`` after
    the first call).
    """

    def __init__(
        self,
        features: Union[Iterable[int], int],
        use_bias: bool = False,
        axis: Union[Iterable[int], int] = -1,
        dtype: torch.dtype = torch.float32,
        kernel_init: Callable = nn.init.kaiming_uniform_,
        bias_init: Callable = nn.init.zeros_,
        reshape_kernel: bool = True,
        kernel_axis_names: Optional[Sequence[str]] = None,
        reshaped_kernel_axis_name_map: Optional[Mapping[str, str]] = None,
    ):
        """Create the layer configuration (parameters are built lazily).

        Args:
            features: Output feature dim(s) appended after the contraction.
            use_bias: Whether to add a learnable bias of shape ``features``.
            axis: Input axis (or axes) to contract over.
            dtype: Parameter and computation dtype.
            kernel_init: In-place initializer applied to the kernel.
            bias_init: In-place initializer applied to the bias.
            reshape_kernel: Store the kernel as a 2-D ``(in, out)`` matrix
                instead of the full ``contract_dims + features`` shape.
            kernel_axis_names: Optional logical names, one per full-kernel
                axis; validated against the kernel shape on forward.
            reshaped_kernel_axis_name_map: Optional renames for the merged
                axis names when ``reshape_kernel`` is set (kept for
                interface parity; the names are only validated here).
        """
        super().__init__()
        self.features = features
        self.use_bias = use_bias
        self.axis = axis
        self.dtype = dtype
        self.kernel_init = kernel_init
        self.bias_init = bias_init
        self.reshape_kernel = reshape_kernel
        # These two were read in forward() but never set anywhere,
        # causing an AttributeError — now proper (optional) config.
        self.kernel_axis_names = kernel_axis_names
        self.reshaped_kernel_axis_name_map = (
            {}
            if reshaped_kernel_axis_name_map is None
            else dict(reshaped_kernel_axis_name_map)
        )
        # Lazily-built parameters (shape depends on the first input).
        self.kernel: Optional[nn.Parameter] = None
        self.bias: Optional[nn.Parameter] = None

    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        """Contract ``inputs`` over ``self.axis`` and map onto ``features``.

        Args:
            inputs: Tensor whose dims at ``self.axis`` match the kernel's
                contraction dims (fixed by the first call).

        Returns:
            Tensor of shape ``batch_dims + features``.

        Raises:
            ValueError: If ``kernel_axis_names`` is set and its length
                does not match the number of full-kernel axes.
        """
        # Canonicalize `features`/`axis` to tuples; make axes non-negative.
        features = (
            tuple(self.features)
            if isinstance(self.features, Iterable)
            else (self.features,)
        )
        axis = (
            tuple(self.axis)
            if isinstance(self.axis, Iterable)
            else (self.axis,)
        )
        inputs = inputs.to(self.dtype)
        axis = tuple(ax if ax >= 0 else inputs.ndim + ax for ax in axis)

        contract_dims = tuple(inputs.shape[ax] for ax in axis)
        kernel_shape = contract_dims + features

        if self.kernel_axis_names is not None and len(
            self.kernel_axis_names
        ) != len(kernel_shape):
            raise ValueError(
                f"Kernel axis names {self.kernel_axis_names} doesn't match kernel shape {kernel_shape}."
            )

        if self.kernel is None:
            # Build and initialize parameters exactly once (the original
            # re-created freshly randomized parameters on every call,
            # making the layer untrainable).
            if self.reshape_kernel:
                param_shape = (math.prod(contract_dims), math.prod(features))
            else:
                param_shape = kernel_shape
            kernel = torch.empty(param_shape, dtype=self.dtype)
            self.kernel_init(kernel)  # honor the configured initializer
            self.kernel = nn.Parameter(kernel)
            if self.use_bias:
                bias = torch.empty(features, dtype=self.dtype)
                self.bias_init(bias)
                self.bias = nn.Parameter(bias)

        # View the (possibly flattened) kernel in its full shape and
        # contract the selected input axes against its leading axes.
        kernel = self.kernel.reshape(kernel_shape)
        out = torch.tensordot(
            inputs, kernel, dims=(list(axis), list(range(len(axis))))
        )

        if self.use_bias:
            # Bias has shape `features` — exactly the trailing output
            # dims — so plain broadcasting is correct.
            out = out + self.bias
        return out


def _canonicalize_tuple(x):
    if isinstance(x, Iterable):
        return tuple(x)
    else:
        return (x,)


def _normalize_axes(
    axes: Iterable[int], ndim: int
) -> Tuple[int, ...]:
    return tuple([ax if ax >= 0 else ndim + ax for ax in axes])


def _convert_to_activation_function(
    fn_or_string: Union[str, Callable]
) -> Callable:
    if fn_or_string == "linear":
        return lambda x: x
    elif isinstance(fn_or_string, str):
        return getattr(F, fn_or_string)
    elif callable(fn_or_string):
        return fn_or_string
    else:
        raise ValueError(
            "don't know how to convert %s to an activation function"
            % (fn_or_string,)
        )
