package com.dl4jlpr;

import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.samediff.SDLayerParams;
import org.deeplearning4j.nn.conf.layers.samediff.SameDiffLayer;
import org.deeplearning4j.nn.params.DefaultParamInitializer;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.layers.convolution.Pooling2D;
import org.nd4j.linalg.api.ops.impl.layers.convolution.config.Conv2DConfig;
import org.nd4j.linalg.api.ops.impl.layers.convolution.config.Pooling2DConfig;
import org.nd4j.weightinit.impl.XavierInitScheme;

import java.util.Arrays;
import java.util.Map;
import java.util.UUID;

@Slf4j
@Slf4j
public class small_basic_block extends SameDiffLayer {

    // Number of input channels expected by the first 1x1 convolution.
    private long nIn;
    // Number of output channels produced by the final 1x1 convolution.
    private long nOut;
    // Batch size; only used for the placeholder parameter shape in defineParameters().
    private long batchsize;

    public small_basic_block(Builder builder) {
        super(builder);
        this.nIn = builder.nIn;
        this.nOut = builder.nOut;
        this.batchsize = builder.batchsize;
    }

    /** No-arg constructor — presumably required for JSON/Jackson deserialization of the layer configuration; TODO confirm. */
    private small_basic_block() {
    }

    /**
     * Builds the block graph: 1x1 squeeze to nOut/4 channels, then 3x1 and 1x3
     * ("same"-padded) convolutions at nOut/4 channels, then a 1x1 expansion to nOut.
     * Spatial dimensions are preserved (stride 1, matching padding throughout).
     *
     * NOTE(review): the weights/biases below are created directly via sameDiff.var()/zero()
     * instead of being taken from the {@code map} of declared layer parameters, so the
     * WEIGHT_KEY parameter declared in defineParameters() is never used here — these
     * variables likely bypass DL4J's parameter/updater handling; verify against the
     * SameDiffLayer contract.
     */
    @Override
    public SDVariable defineLayer(SameDiff sameDiff, SDVariable layerInput, Map<String, SDVariable> map, SDVariable mask) {
        log.info("Small_basic_block input shape:{}", Arrays.toString(layerInput.getShape()));
        long[] shape = layerInput.getShape();
        // Stride-1 configs; padding chosen so each conv preserves spatial size ("same" padding).
        Conv2DConfig layer1ConvConfig = Conv2DConfig.builder().kH(1).kW(1).sH(1).sW(1).pH(0).pW(0).build();
        Conv2DConfig layer2ConvConfig = Conv2DConfig.builder().kH(3).kW(1).sH(1).sW(1).pH(1).pW(0).build();
        Conv2DConfig layer3ConvConfig = Conv2DConfig.builder().kH(1).kW(3).sH(1).sW(1).pH(0).pW(1).build();
        Conv2DConfig layer4ConvConfig = Conv2DConfig.builder().kH(1).kW(1).sH(1).sW(1).pH(0).pW(0).build();
        // Weight layout is [kH, kW, inChannels, outChannels], Xavier-initialized from the
        // spatial fan-in/fan-out; biases start at zero.
        SDVariable w1 = sameDiff.var("w1", new XavierInitScheme('c', nIn * shape[2] * shape[3], nOut / 4 * shape[2] * shape[3]), DataType.FLOAT, 1, 1, nIn, nOut / 4);
        SDVariable b1 = sameDiff.zero("b1", nOut / 4);
        SDVariable w2 = sameDiff.var("w2", new XavierInitScheme('c', nOut / 4 * shape[2] * shape[3], nOut / 4 * shape[2] * shape[3]), DataType.FLOAT, 3, 1, nOut / 4, nOut / 4);
        SDVariable b2 = sameDiff.zero("b2", nOut / 4);
        SDVariable w3 = sameDiff.var("w3", new XavierInitScheme('c', nOut / 4 * shape[2] * shape[3], nOut / 4 * shape[2] * shape[3]), DataType.FLOAT, 1, 3, nOut / 4, nOut / 4);
        SDVariable b3 = sameDiff.zero("b3", nOut / 4);
        SDVariable w4 = sameDiff.var("w4", new XavierInitScheme('c', nOut / 4 * shape[2] * shape[3], nOut * shape[2] * shape[3]), DataType.FLOAT, 1, 1, nOut / 4, nOut);
        SDVariable b4 = sameDiff.zero("b4", nOut);
        SDVariable conv1 = sameDiff.cnn().conv2d(layerInput, w1, b1, layer1ConvConfig);
        // NOTE(review): relu(x, 0.5) applies a 0.5 cutoff threshold; a plain ReLU would use
        // cutoff 0 — confirm the 0.5 is intentional (preserved as-is here).
        SDVariable relu1 = sameDiff.nn.relu(conv1, 0.5);
        SDVariable conv2 = sameDiff.cnn().conv2d(relu1, w2, b2, layer2ConvConfig);
        SDVariable relu2 = sameDiff.nn.relu(conv2, 0.5);
        SDVariable conv3 = sameDiff.cnn().conv2d(relu2, w3, b3, layer3ConvConfig);
        SDVariable relu3 = sameDiff.nn.relu(conv3, 0.5);
        SDVariable conv4 = sameDiff.cnn().conv2d(relu3, w4, b4, layer4ConvConfig);

        return conv4;
    }

    /**
     * Declares a single weight parameter of shape (nIn, 1, batchsize, 1).
     * NOTE(review): this parameter is never read in defineLayer() (which builds its own
     * variables), so it appears to be a placeholder; its shape referencing batchsize also
     * looks suspect — verify whether it should be removed or wired into defineLayer().
     */
    @Override
    public void defineParameters(SDLayerParams sdLayerParams) {
        sdLayerParams.addWeightParam(DefaultParamInitializer.WEIGHT_KEY, (int) nIn, 1, (int) batchsize, 1);
    }

    /** Initializes the declared (currently unused — see defineParameters) weight parameter with the configured weight init. */
    @Override
    public void initializeParameters(Map<String, INDArray> map) {
        initWeights((int) nIn, 1, weightInit, map.get(DefaultParamInitializer.WEIGHT_KEY));
    }

    /**
     * Output has the same spatial dimensions as the input with nOut channels.
     * Assumes inputType.getShape() is [channels, height, width] so shape[1]/shape[2] are
     * height/width — TODO confirm against the InputType.convolutional contract.
     */
    @Override
    public InputType getOutputType(int i, InputType inputType) {
        long[] shape = inputType.getShape();
        log.info("Small_basic_block input shape:{} and the lay index:{}", Arrays.toString(shape), String.valueOf(i));
        return InputType.convolutionalFlat(shape[1], shape[2], nOut);
    }

    public static class Builder extends SameDiffLayer.Builder<small_basic_block.Builder> {
        private int nIn;
        private int nOut;
        private int batchsize;

        /**
         * Sets the weight initialization scheme.
         * Fix: the previous implementation stored the value in a private field that
         * shadowed the superclass builder's weightInit, so the configured scheme never
         * reached the layer and initializeParameters() always saw the superclass default.
         * Delegating to super propagates it correctly.
         */
        @Override
        public small_basic_block.Builder weightInit(WeightInit weightInit) {
            super.weightInit(weightInit);
            return this;
        }

        public small_basic_block.Builder nIn(int nIn) {
            this.nIn = nIn;
            return this;
        }

        public small_basic_block.Builder nOut(int nOut) {
            this.nOut = nOut;
            return this;
        }

        public small_basic_block.Builder batchsize(int batchsize) {
            this.batchsize = batchsize;
            return this;
        }

        @Override
        public small_basic_block build() {
            return new small_basic_block(this);
        }
    }
}