#pragma once
#include <map>

#include <LibDL/Tensor/Tensor.h>
#include <LibDL/Tensor/Scalar.h>
#include <LibDL/utils.h>


/// Static-only wrapper class exposing the torch::nn::init weight-initialization
/// routines on the project's Tensor type. Each wrapper forwards the Tensor's
/// `.core` (the underlying torch tensor) to the matching torch::nn::init
/// function and re-wraps the result in a Tensor. The class is never
/// instantiated; all members are static.
class init {
public:
	// Activation kinds accepted by the kaiming_* initializers and
	// calculate_gain(); mirrors the nonlinearity set torch::nn::init accepts.
	enum class Nonlinearity {
		Linear,
		Conv1D,
		Conv2D,
		Conv3D,
		ConvTranspose1D,
		ConvTranspose2D,
		ConvTranspose3D,
		Sigmoid,
		Tanh,
		ReLU,
		LeakyReLU
	};

	// Fan direction used by the kaiming_* initializers (torch kFanIn/kFanOut).
	enum class FanMode { FanIn, FanOut };

// Expands to a static wrapper `NAME PARAM_LIST` that forwards PARAM_CALL to
// torch::nn::init::NAME and wraps the returned torch tensor in a Tensor.
// noexcept(false) is spelled out because the underlying torch call may throw.
#define DEFINE_INIT_WRAPPER_FUNCTION(NAME,PARAM_LIST,PARAM_CALL)\
		static Tensor NAME  PARAM_LIST noexcept(false) {\
		return Tensor(torch::nn::init:: NAME PARAM_CALL);\
		}

	// In-place initializers, named after their torch::nn::init counterparts
	// (trailing underscore: the passed tensor's storage is mutated).
	DEFINE_INIT_WRAPPER_FUNCTION(xavier_uniform_, (const Tensor &self, double gain = 1.0), (self.core, gain));
	DEFINE_INIT_WRAPPER_FUNCTION(xavier_normal_, (const Tensor &self, double gain = 1.0), (self.core, gain));

	DEFINE_INIT_WRAPPER_FUNCTION(uniform_, (const Tensor &self, double low = 0, double high = 1), (self.core, low, high));
	DEFINE_INIT_WRAPPER_FUNCTION(normal_, (const Tensor &self, double mean = 0, double std = 1), (self.core, mean, std));

	DEFINE_INIT_WRAPPER_FUNCTION(zeros_, (const Tensor &self), (self.core));
	DEFINE_INIT_WRAPPER_FUNCTION(ones_, (const Tensor &self), (self.core));
	DEFINE_INIT_WRAPPER_FUNCTION(eye_, (const Tensor &self), (self.core));
	DEFINE_INIT_WRAPPER_FUNCTION(dirac_, (const Tensor &self), (self.core));

	DEFINE_INIT_WRAPPER_FUNCTION(sparse_, (const Tensor &self, double sparsity, double std = 0.01), (self.core, sparsity, std));
	DEFINE_INIT_WRAPPER_FUNCTION(orthogonal_, (const Tensor &self, double gain = 1.0), (self.core, gain));
	DEFINE_INIT_WRAPPER_FUNCTION(constant_, (const Tensor &t, const Scalar &s), (t.core, s.core));

// Pre-1.5 libtorch exposes FanMode / Nonlinearity as plain enums, so the
// wrappers below convert this class's enum classes by direct cast.
// NOTE(review): this assumes the enumerator order declared above matches
// torch's pre-1.5 enum order — verify against that libtorch version.
#ifdef Torch_Version_Less_15
    DEFINE_INIT_WRAPPER_FUNCTION(kaiming_uniform_, (
        const Tensor &self,
        double a = 0,
        FanMode mode = FanMode::FanIn,
        Nonlinearity nonlinearity = Nonlinearity::LeakyReLU
        ),
        (self.core, a, torch::nn::init::FanMode(mode), torch::nn::init::Nonlinearity(nonlinearity))
    );

    DEFINE_INIT_WRAPPER_FUNCTION(kaiming_normal_, (
        const Tensor &self,
        double a = 0,
        FanMode mode = FanMode::FanIn,
        Nonlinearity nonlinearity = Nonlinearity::LeakyReLU
        ),
        (self.core, a, torch::nn::init::FanMode(mode), torch::nn::init::Nonlinearity(nonlinearity))
    );

	// Returns torch's recommended gain factor for the given nonlinearity;
	// `param` is the nonlinearity's parameter (e.g. LeakyReLU negative slope).
	static double calculate_gain(Nonlinearity nonlinearity, double param = 0.01) noexcept(false) {
		return torch::nn::init::calculate_gain(torch::nn::init::Nonlinearity(nonlinearity), param);
	}
#else
    // libtorch >= 1.5 replaced the plain enums with variant-based
    // NonlinearityType / FanModeType; these maps translate this class's enum
    // classes into torch's tag values. NOTE(review): presumably defined and
    // populated in a .cpp via createNonlinearityMap()/createFanModeMap() —
    // if left empty, the operator[] lookups below would default-insert
    // values; confirm against the translation unit that defines them.
    static std::map<Nonlinearity, torch::nn::init::NonlinearityType> NonlinearityMap;
	static std::map<FanMode, torch::nn::init::FanModeType> FanModeMap;

    DEFINE_INIT_WRAPPER_FUNCTION(kaiming_uniform_, (
        const Tensor &self,
        double a = 0,
        FanMode mode = FanMode::FanIn,
        Nonlinearity nonlinearity = Nonlinearity::LeakyReLU
        ),
        (self.core, a, torch::nn::init::FanModeType(FanModeMap[mode]), torch::nn::init::NonlinearityType(NonlinearityMap[nonlinearity]))
    );

    DEFINE_INIT_WRAPPER_FUNCTION(kaiming_normal_, (
        const Tensor &self,
        double a = 0,
        FanMode mode = FanMode::FanIn,
        Nonlinearity nonlinearity = Nonlinearity::LeakyReLU
        ),
        (self.core, a, torch::nn::init::FanModeType(FanModeMap[mode]), torch::nn::init::NonlinearityType(NonlinearityMap[nonlinearity]))
    );

    // Returns torch's recommended gain factor for the given nonlinearity;
    // `param` is the nonlinearity's parameter (e.g. LeakyReLU negative slope).
    static double calculate_gain(Nonlinearity nonlinearity, double param = 0.01) noexcept(false) {
        return torch::nn::init::calculate_gain(torch::nn::init::NonlinearityType(NonlinearityMap[nonlinearity]), param);
    }
private:
    // Builds the Nonlinearity -> torch NonlinearityType translation table;
    // one entry per Nonlinearity enumerator declared above.
    static std::map<Nonlinearity, torch::nn::init::NonlinearityType> createNonlinearityMap() {
        std::map<Nonlinearity, torch::nn::init::NonlinearityType> map;
        map = {
            { Nonlinearity::Linear, torch::kLinear },
            { Nonlinearity::Conv1D, torch::kConv1D },
            { Nonlinearity::Conv2D, torch::kConv2D },
            { Nonlinearity::Conv3D, torch::kConv3D },
            { Nonlinearity::ConvTranspose1D, torch::kConvTranspose1D },
            { Nonlinearity::ConvTranspose2D, torch::kConvTranspose2D },
            { Nonlinearity::ConvTranspose3D, torch::kConvTranspose3D },
            { Nonlinearity::Sigmoid, torch::kSigmoid },
            { Nonlinearity::Tanh, torch::kTanh },
            { Nonlinearity::ReLU, torch::kReLU },
            { Nonlinearity::LeakyReLU, torch::kLeakyReLU }};
        return map;
    }
    // Builds the FanMode -> torch FanModeType translation table.
    static std::map<FanMode, torch::nn::init::FanModeType> createFanModeMap() {
        std::map<FanMode, torch::nn::init::FanModeType> map;
        map = {
            {FanMode::FanIn, torch::kFanIn},
            {FanMode::FanOut, torch::kFanOut}};
        return map;
    }
#endif
};