/**
 * Copyright 2021 Huawei Technologies Co., Ltd
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.mindspore.lite.train_deepfm;

import com.mindspore.lite.MSTensor;
import com.mindspore.lite.LiteSession;
import com.mindspore.lite.TrainSession;
import com.mindspore.lite.config.MSConfig;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Vector;

/**
 * Drives on-device training of a DeepFM model with MindSpore Lite:
 * loads a *.ms train model, feeds batches from a JSON dataset, runs the
 * train loop, evaluates accuracy, and exports the trained model.
 *
 * <p>Not thread-safe: all state (session, dataset cursor) is held in
 * mutable instance fields.
 */
public class NetRunner {
    private LiteSession session;
    private long batchSize;
    private long dataSize; // one input data size, in byte
    private Dataset ds = new Dataset();
    private int numOfClasses = 2;
    private long cycles = 2;            // number of training epochs over virtualBatch steps
    private int idx = 1;                // cursor used by fillInputData() when sampling serially
    private int virtualBatch = 16;      // virtual batch multiplier
    private String trainedFilePath = "trained.ms";

    /**
     * Creates the train session from {@code modelPath} and records the input
     * geometry (batch size, per-sample byte size) plus the export path.
     *
     * @param modelPath        path to a *.ms train model file
     * @param virtualBatchSize virtual batch multiplier passed to the session
     */
    public void initAndFigureInputs(String modelPath, int virtualBatchSize) {
        MSConfig msConfig = new MSConfig();
        // arg 0: DeviceType:DT_CPU -> 0
        // arg 1: ThreadNum -> 2
        // arg 2: cpuBindMode:NO_BIND ->  0
        // arg 3: enable_fp16 -> false
        msConfig.init(0, 2, 0, false);
        System.out.println("Model path is " + modelPath);
        session = TrainSession.createTrainSession(modelPath, msConfig, false);
        virtualBatch = virtualBatchSize;
        session.setupVirtualBatch(virtualBatch, 0.01f, 1.00f);

        List<MSTensor> inputs = session.getInputs();
        // fillInputData() reads inputs 0 (feature ids), 1 (feature values)
        // and 2 (labels), so fewer than 3 inputs cannot work.
        if (inputs.size() < 3) {
            System.err.println("model input size: " + inputs.size());
            return;
        }

        batchSize = inputs.get(0).getShape()[0];
        dataSize = inputs.get(0).size() / batchSize;
        System.out.println("batch_size: " + batchSize);
        System.out.println("virtual batch multiplier: " + virtualBatch);
        int index = modelPath.lastIndexOf(".ms");
        if (index == -1) {
            System.out.println("The model " + modelPath + " should be named *.ms");
            return;
        }
        trainedFilePath = modelPath.substring(0, index) + "_trained.ms";
    }

    /**
     * Loads train/test splits from {@code datasetPath}/train.json and
     * {@code datasetPath}/test.json.
     *
     * @return 0 on success, -1 if either split is empty
     */
    public int initDB(String datasetPath) {
        ds.initDataset(datasetPath + "/train.json", datasetPath + "/test.json");

        if (ds.getTrainData().size() == 0) {
            System.err.println("train data size is 0");
            return -1;
        }

        if (ds.getTestData().size() == 0) {
            System.err.println("test data size is 0");
            return -1;
        }

        return 0;
    }

    /**
     * Returns the scalar loss of the last {@code runGraph()} call, or
     * {@code Float.NaN} when no single-element output tensor exists
     * (searchOutputsForSize returns null in that case).
     */
    public float getLoss() {
        MSTensor tensor = searchOutputsForSize(1);
        if (tensor == null) {
            System.err.println("loss tensor not found");
            return Float.NaN;
        }
        return tensor.getFloatData()[0];
    }

    /**
     * Finds the first session output tensor whose element count equals
     * {@code size}; returns {@code null} (after logging) when none matches.
     */
    private MSTensor searchOutputsForSize(int size) {
        int num = 0;
        Map<String, MSTensor> outputs = session.getOutputMapByTensor();
        for (MSTensor tensor : outputs.values()) {
            if (tensor.elementsNum() == size) {
                return tensor;
            }
            num += tensor.getFloatData().length; // diagnostic tally for the failure message below
        }
        System.out.println(outputs.size() + "\t" + size + "\t" + num);
        System.err.println("can not find output the tensor which element num is " + size);
        return null;
    }

    /**
     * Runs {@code cycles} epochs of {@code virtualBatch} training steps each,
     * tracking the minimum loss. Periodic accuracy logging only fires when
     * {@code (epoch+1) % 500 == 0}, i.e. only for runs with cycles >= 500.
     *
     * @return 0 always (kept for API compatibility)
     */
    public int trainLoop() {
        session.train();
        float min_loss = 1000;
        float max_acc = 0;
        for (int i = 0; i < cycles; i++) {
            for (int b = 0; b < virtualBatch; b++) {
                fillInputData(ds.getTrainData(), false); // random sampling during training
                session.runGraph();
                float loss = getLoss();
                if (min_loss > loss) {
                    min_loss = loss;
                }
                if ((b == 0) && ((i + 1) % 500 == 0)) {
                    float acc = calculateAccuracy(10); // only test 10 batches
                    if (max_acc < acc) {
                        max_acc = acc;
                    }
                    System.out.println("step_" + (i + 1) + ": \tLoss is " + loss + " [min=" + min_loss + "]" + " max_accc=" + max_acc);
                }
            }
        }
        return 0;
    }

    /**
     * Evaluates top-1 accuracy over the test split.
     *
     * @param maxTests cap on the number of batches to evaluate, or -1 to
     *                 iterate the whole test set serially
     * @return fraction of correctly classified samples in [0, 1]
     */
    public float calculateAccuracy(long maxTests) {
        float accuracy = 0;
        Vector<Dataset.DataLabelTuple> test_set = ds.getTestData();
        System.out.println("dataset got!");
        long tests = test_set.size() / batchSize;
        System.out.println("eval got tests is" + tests);
        // Cap the batch count at maxTests; the original inverted this
        // comparison, which *raised* tests above the available data.
        if (maxTests != -1 && tests > maxTests) {
            tests = maxTests;
        }
        session.eval();
        for (long i = 0; i < tests; i++) {
            System.out.println("current step is " + i);
            // Serial sampling only for a full (-1) evaluation pass.
            Vector<Integer> labels = fillInputData(test_set, (maxTests == -1));
            System.out.println(labels.size());
            if (labels.size() != batchSize) {
                System.err.println("unexpected labels size: " + labels.size() + " batch_size size: " + batchSize);
                System.exit(1);
            }
            session.runGraph();
            MSTensor outputsv = searchOutputsForSize((int) (batchSize * numOfClasses));
            if (outputsv == null) {
                System.err.println("can not find output tensor with size: " + batchSize * numOfClasses);
                System.exit(1);
            }
            // scores is laid out row-major: [batch][class]
            float[] scores = outputsv.getFloatData();
            for (int b = 0; b < batchSize; b++) {
                int max_idx = 0;
                float max_score = scores[(int) (numOfClasses * b)];
                for (int c = 0; c < numOfClasses; c++) {
                    if (scores[(int) (numOfClasses * b + c)] > max_score) {
                        max_score = scores[(int) (numOfClasses * b + c)];
                        max_idx = c;
                    }
                }
                if (labels.get(b) == max_idx) {
                    accuracy += 1.0;
                }
            }
        }
        session.train(); // restore train mode for the caller
        accuracy /= (batchSize * tests);
        return accuracy;
    }

    /**
     * Fills the session's three inputs (feature ids, feature values, labels)
     * with one batch from {@code dataset}.
     *
     * @param dataset  source samples
     * @param serially true to walk the dataset with the {@code idx} cursor,
     *                 false to sample uniformly at random
     * @return the labels of the selected batch, in batch order
     */
    Vector<Integer> fillInputData(Vector<Dataset.DataLabelTuple> dataset, boolean serially) {
        Vector<Integer> labelsVec = new Vector<>();
        int totalSize = dataset.size();

        List<MSTensor> inputs = session.getInputs();

        int inputIdsDataCnt = inputs.get(0).elementsNum();
        int[] inputIdsBatchData = new int[inputIdsDataCnt];

        int inputValSDataCnt = inputs.get(1).elementsNum();
        float[] inputValsBatchData = new float[inputValSDataCnt];

        int labelDataCnt = inputs.get(2).elementsNum();
        int[] labelBatchData = new int[labelDataCnt];

        for (int i = 0; i < batchSize; i++) {
            if (serially) {
                idx = (idx + 1) % totalSize;
            } else {
                idx = (int) (Math.random() * totalSize);
            }

            Dataset.DataLabelTuple dataLabelTuple = dataset.get(idx);
            int label = dataLabelTuple.label.get(0).intValue();
            int[] ids = dataLabelTuple.feat_ids.stream().mapToInt(j -> j).toArray();

            int n = 0;
            float[] vals = new float[dataLabelTuple.feat_vals.size()];
            for (Float f : dataLabelTuple.feat_vals) {
                vals[n++] = (f != null ? f : Float.NaN); // Or whatever default you want.
            }
            System.arraycopy(ids, 0, inputIdsBatchData, i * ids.length, ids.length);
            System.arraycopy(vals, 0, inputValsBatchData, i * vals.length, vals.length);
            labelBatchData[i] = label;
            labelsVec.add(label);
        }

        // NOTE(review): ids are written as floats although they are int-typed
        // on the Java side (buffer is sized with Integer.BYTES) — confirm the
        // model's first input dtype before changing putFloat to putInt.
        ByteBuffer byteBufIds = ByteBuffer.allocateDirect(inputIdsBatchData.length * Integer.BYTES);
        byteBufIds.order(ByteOrder.nativeOrder());
        for (int i = 0; i < inputIdsBatchData.length; i++) {
            byteBufIds.putFloat(inputIdsBatchData[i]);
        }
        inputs.get(0).setData(byteBufIds);

        // Fixed: the original filled this buffer from inputIdsBatchData,
        // feeding feature *ids* into the feature-*values* input.
        ByteBuffer byteBufVals = ByteBuffer.allocateDirect(inputValsBatchData.length * Float.BYTES);
        byteBufVals.order(ByteOrder.nativeOrder());
        for (int i = 0; i < inputValsBatchData.length; i++) {
            byteBufVals.putFloat(inputValsBatchData[i]);
        }
        inputs.get(1).setData(byteBufVals);

        ByteBuffer labelByteBuf = ByteBuffer.allocateDirect(labelBatchData.length * Integer.BYTES);
        labelByteBuf.order(ByteOrder.nativeOrder());
        for (int i = 0; i < labelBatchData.length; i++) {
            labelByteBuf.putInt(labelBatchData[i]);
        }
        inputs.get(2).setData(labelByteBuf);

        return labelsVec;
    }

    /**
     * End-to-end pipeline: load model, load dataset, train, evaluate, and
     * export the trained model to {@code <model>_trained.ms}.
     */
    public void trainModel(String modelPath, String datasetPath, int virtualBatch) {
        System.out.println("==========Loading Model, Create Train Session=============");
        initAndFigureInputs(modelPath, virtualBatch);
        System.out.println("==========Initing DataSet================");
        if (initDB(datasetPath) != 0) {
            // Previously ignored: training on an empty dataset would crash
            // or silently do nothing.
            System.err.println("dataset init failed, abort training");
            session.free();
            return;
        }
        System.out.println("==========Training Model===================");
        trainLoop();
        System.out.println("==========Evaluating The Trained Model============");
        float acc = calculateAccuracy(-1);
        System.out.println("accuracy = " + acc);

        if (cycles > 0) {
            // arg 0: FileName
            // arg 1: model type MT_TRAIN -> 0
            // arg 2: quantization type QT_DEFAULT -> 0
            if (session.export(trainedFilePath, 0, 0)) {
                System.out.println("Trained model successfully saved: " + trainedFilePath);
            } else {
                System.err.println("Save model error.");
            }
        }
        session.free();
    }
}
