package com.dl4jlpr;

import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.inputs.InvalidInputTypeException;
import org.deeplearning4j.nn.conf.layers.samediff.SDLayerParams;
import org.deeplearning4j.nn.conf.layers.samediff.SameDiffLambdaVertex;
import org.deeplearning4j.nn.conf.layers.samediff.SameDiffLayer;
import org.deeplearning4j.nn.params.DefaultParamInitializer;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.weightinit.impl.XavierInitScheme;

import java.util.Arrays;
import java.util.Map;
import java.util.UUID;

@Slf4j
public class RiseJoin extends SameDiffLambdaVertex {

    // Mini-batch size this vertex was configured with.
    // NOTE(review): currently unused by the forward pass — confirm whether the
    // hard-coded reshape below was meant to depend on it.
    private final int batchsize;
    // Number of output classes. NOTE(review): also unused by the forward pass.
    private final int classnum;

    /**
     * Creates the vertex.
     *
     * @param batchsize configured mini-batch size
     * @param classnum  number of output classes
     */
    public RiseJoin(int batchsize, int classnum) {
        this.batchsize = batchsize;
        this.classnum = classnum;
    }

    /**
     * Defines the vertex computation: reshapes the single input to a fixed
     * {@code [1, 7]} row vector.
     *
     * <p>NOTE(review): the shape {@code (1, 7)} is hard-coded and ignores
     * {@code batchsize}/{@code classnum}; the input must therefore contain exactly
     * 7 elements — confirm against the upstream layer's output.
     */
    @Override
    public SDVariable defineVertex(SameDiff sameDiff, VertexInputs inputs) {
        SDVariable input = inputs.getInput(0);
        long[] shape = input.getShape();
        // The backing array may not be materialized during graph construction,
        // so only log the data when it is actually available.
        if (input.getArr() != null) {
            log.info("RiseJoin defineLayer input shape:{};and the data is {}",
                    Arrays.toString(shape), input.getArr(true));
        }
        return input.reshape(1, 7);
    }

    /**
     * Reports the output type of this vertex by inferring it from a dummy
     * {@code [1, 7]} array, matching the reshape in
     * {@link #defineVertex(SameDiff, VertexInputs)}.
     *
     * <p>NOTE(review): the 7 here must stay in sync with the reshape above; the
     * incoming {@code vertexInputs[0]} shape is logged but otherwise ignored.
     */
    @Override
    public InputType getOutputType(int layerIndex, InputType... vertexInputs) throws InvalidInputTypeException {
        long[] shape = vertexInputs[0].getShape();
        // SLF4J boxes the int itself; no String.valueOf needed.
        log.info("RiseJoin input shape:{} and the lay index:{}", Arrays.toString(shape), layerIndex);
        return InputType.inferInputType(Nd4j.create(new float[7], new long[]{1, 7}));
    }
}
