/*
 * Copyright (c) Huawei Technologies Co., Ltd. 2022-2022. All rights reserved.
 */

package org.example;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;

import java.io.File;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.ByteOrder;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

import com.mindspore.MSTensor;
import com.mindspore.ModelParallelRunner;
import com.mindspore.config.DataType;
import com.mindspore.config.DeviceType;
import com.mindspore.config.MSContext;
import com.mindspore.config.RunnerConfig;
import static com.mindspore.config.DataType.kNumberTypeFloat;

/**
 * Wrapper around a MindSpore Lite {@link ModelParallelRunner}: loads a model file,
 * builds a CPU runner configuration from a property map, and runs batched predictions.
 *
 * <p>Recognized configuration keys are the {@code MS_CONFIG_*} constants. All integer
 * properties fall back to their defaults when absent or unparsable.
 *
 * <p>NOTE(review): this class is not documented as thread-safe; {@code init} must
 * complete successfully before {@code calc} is called.
 */
class MSModel {
    static final String MS_CONFIG_THREAD_NUM = "ms.config.thread.num";

    static final String MS_CONFIG_CPU_WORKER_NUM = "ms.config.cpu.worker.num";

    static final String MS_CONFIG_CPU_BIND_MODE = "ms.config.cpu.bind.mode";

    static final String MS_CONFIG_CPU_ENABLE_FP16 = "ms.config.cpu.enable.fp16";

    static final String MS_CONFIG_INTER_OP_PARALLEL_NUM = "ms.config.inter.op.parallel.num";

    static final String MS_CONFIG_RUNNER_CONFIG_PATH = "ms.config.runner.config.path";

    private static final int MS_DEFAULT_THREAD_NUM = 4;

    private static final int MS_DEFAULT_WORKER_NUM = 4;

    // The model name is the file name up to the last extension separator
    // (e.g. "browser_v50.pb.ms" -> "browser_v50.pb"). The original code passed the
    // magic int 46 to lastIndexOf(int), which is the code point of '.'.
    private static final char MODEL_NAME_SEPARATOR = '.';

    /** Configuration properties supplied to {@link #init(String, Map)}. */
    protected Map<String, String> props;

    /** Base name of the loaded model file (last extension stripped). */
    protected String modelName;

    /** Parallel inference runner; remains null until {@link #init(String, Map)} succeeds. */
    protected ModelParallelRunner runner;

    /** Input tensor templates reported by the runner after a successful init. */
    public List<MSTensor> inputTensors;

    /** Output tensor templates reported by the runner after a successful init. */
    protected List<MSTensor> outputTensors;

    /**
     * Parses an integer property from {@link #props}.
     *
     * @param key property key to look up
     * @param defaultValue value used when the key is absent, empty, or unparsable
     * @param label human-readable name used in the error message (keeps the original
     *              "Input &lt;label&gt; exception" wording)
     * @return the parsed value or {@code defaultValue}
     */
    private int parseIntProp(String key, int defaultValue, String label) {
        String raw = this.props.get(key);
        if (StringUtils.isEmpty(raw)) {
            return defaultValue;
        }
        try {
            return Integer.parseInt(raw);
        } catch (NumberFormatException e) {
            System.out.println("Input " + label + " exception");
            return defaultValue;
        }
    }

    /**
     * Builds an {@link MSContext} for CPU inference from the configured thread count,
     * bind mode, inter-op parallelism and FP16 flag.
     *
     * @return an initialized context, or {@code null} on failure (the context is freed
     *         before returning null when device registration fails)
     */
    private MSContext createMsCpuContext() {
        MSContext context = new MSContext();
        int threadNum = parseIntProp(MS_CONFIG_THREAD_NUM, MS_DEFAULT_THREAD_NUM, "thread num");
        int cpuBindMode = parseIntProp(MS_CONFIG_CPU_BIND_MODE, 0, "cpu bind mode");
        if (!context.init(threadNum, cpuBindMode)) {
            return null;
        }
        // Inter-op parallelism defaults to the thread count when not configured.
        int parallelNum = parseIntProp(MS_CONFIG_INTER_OP_PARALLEL_NUM, threadNum, "parallel num");
        context.setInterOpParallelNum(parallelNum);
        // Boolean.parseBoolean(null) is false, matching the original default.
        boolean isEnableFloat16 = Boolean.parseBoolean(this.props.get(MS_CONFIG_CPU_ENABLE_FP16));
        if (!context.addDeviceInfo(DeviceType.DT_CPU, isEnableFloat16, 0)) {
            context.free();
            return null;
        }
        return context;
    }

    /**
     * Builds a {@link RunnerConfig} on top of a fresh CPU context.
     *
     * @param parentPath directory of the model file; an optional extra runner config
     *                   file path is resolved relative to it
     * @return an initialized runner config, or {@code null} on failure
     */
    private RunnerConfig createMsCpuRunnerConfig(String parentPath) {
        MSContext context = createMsCpuContext();
        if (null == context) {
            return null;
        }
        RunnerConfig config = new RunnerConfig();
        if (!config.init(context)) {
            context.free();
            return null;
        }
        int workersNum = parseIntProp(MS_CONFIG_CPU_WORKER_NUM, MS_DEFAULT_WORKER_NUM, "worker num");
        config.setWorkersNum(workersNum);
        if (StringUtils.isNotEmpty(this.props.get(MS_CONFIG_RUNNER_CONFIG_PATH))) {
            String configPath = parentPath + File.separator + this.props.get(MS_CONFIG_RUNNER_CONFIG_PATH);
            config.setConfigPath(configPath);
        }
        return config;
    }

    /** Formats name/shape/type for each tensor in the list, joined by ';'. */
    private String describeTensors(List<MSTensor> tensors) {
        String infoTemp = "Tensor<%s>: shape is <%s>, data type is <%d>";
        List<String> infos = new ArrayList<>(tensors.size());
        for (MSTensor msTensor : tensors) {
            infos.add(String.format(Locale.ROOT, infoTemp, msTensor.tensorName(),
                    Arrays.toString(msTensor.getShape()), msTensor.getDataType()));
        }
        return String.join(";", infos);
    }

    /** Logs the runner's input and output tensor metadata for diagnostics. */
    private void checkInputAndOutputTensors() {
        System.out.println("input tensor infos: " + describeTensors(this.inputTensors));
        System.out.println("output tensor infos: " + describeTensors(this.outputTensors));
    }

    /**
     * Loads the model at {@code modelPath} and prepares the parallel runner.
     * On any failure a message is printed and the instance is left uninitialized
     * ({@link #runner} stays null).
     *
     * @param modelPath path to the .ms model file
     * @param props     configuration map (see {@code MS_CONFIG_*} keys)
     */
    public void init(String modelPath, Map<String, String> props) {
        File file = new File(modelPath);
        if (!file.exists()) {
            System.out.println(String.format(Locale.ROOT, "model file<%s> is not exist", modelPath));
            return;
        }
        this.props = props;
        String fileName = file.getName();
        // Guard against file names with no '.', which previously threw
        // StringIndexOutOfBoundsException from substring(0, -1).
        int sepIndex = fileName.lastIndexOf(MODEL_NAME_SEPARATOR);
        this.modelName = sepIndex >= 0 ? fileName.substring(0, sepIndex) : fileName;
        System.out.println("Begin Initialize Mind Spore Model " + this.modelName);

        String parentPath = file.getParent();
        RunnerConfig config = createMsCpuRunnerConfig(parentPath);
        if (null == config) {
            System.out.println("Create MS-CPU-Config failed.");
            return;
        }
        this.runner = new ModelParallelRunner();
        boolean ret = this.runner.init(modelPath, config);
        config.free();
        if (!ret) {
            System.out.println("ERROR: build graph failed");
            return;
        }
        this.inputTensors = this.runner.getInputs();
        this.outputTensors = this.runner.getOutputs();
        checkInputAndOutputTensors();
    }

    /**
     * Runs a single prediction.
     *
     * @param predictParam map from input tensor name to {@code Pair<batch, dataArray>}
     * @return map from output tensor name to its data array; empty on invalid input
     *         or prediction failure
     */
    public Map<String, Object> calc(Map<String, Object> predictParam) {
        if (predictParam == null || predictParam.isEmpty()) {
            return Collections.emptyMap();
        }
        return trySinglePredict(predictParam);
    }

    /**
     * Binds the request data to freshly created input tensors (first shape axis is
     * replaced by the request batch size) and delegates to {@link #getResult}.
     */
    private Map<String, Object> trySinglePredict(Map<String, Object> paramMap) {
        Map<String, Object> resultMap = new HashMap<>();
        if (paramMap.size() != this.inputTensors.size()) {
            System.out.println(String.format(Locale.ROOT, "Req paramMap size:%d != inputs size:%d.",
                    paramMap.size(), this.inputTensors.size()));
            return resultMap;
        }

        List<MSTensor> feedTensors = new ArrayList<>();
        for (MSTensor inputTensor : inputTensors) {
            Object param = paramMap.get(inputTensor.tensorName());
            // Callers are expected to supply Pair<batchSize, primitiveArray>; the cast
            // is unchecked by design (the map is Map<String, Object>).
            Pair<Integer, Object> obj = (Pair<Integer, Object>) param;
            int[] shapeTensor = inputTensor.getShape();
            shapeTensor[0] = obj.getLeft();
            MSTensor inputMsTensor = MSTensor.createTensor(inputTensor.tensorName(), obj.getRight());
            inputMsTensor.setShape(shapeTensor);
            feedTensors.add(inputMsTensor);
        }

        getResult(feedTensors, resultMap);

        return resultMap;
    }

    /**
     * Executes the prediction and copies each output tensor's data into
     * {@code resultMap}, keyed by tensor name. Both the result tensors and the feed
     * tensors are freed before returning.
     */
    private void getResult(List<MSTensor> feedTensors, Map<String, Object> resultMap) {
        List<MSTensor> resultTensors = new ArrayList<>();
        boolean ret = this.runner.predict(feedTensors, resultTensors);
        try {
            if (!ret || resultTensors.isEmpty()) {
                // BUG FIX: was SLF4J-style "{}" inside String.format, which printed the
                // placeholder literally instead of the value.
                System.out.println(String.format(Locale.ROOT, "MindSpore Lite run failed, ret is <%b>", ret));
                return;
            }
            for (MSTensor curTensor : resultTensors) {
                switch (curTensor.getDataType()) {
                    case kNumberTypeFloat:
                    case DataType.kNumberTypeFloat32:
                        resultMap.put(curTensor.tensorName(), curTensor.getFloatData());
                        break;
                    case DataType.kNumberTypeInt:
                    case DataType.kNumberTypeInt32:
                        resultMap.put(curTensor.tensorName(), curTensor.getIntData());
                        break;
                    case DataType.kNumberTypeInt64:
                        resultMap.put(curTensor.tensorName(), curTensor.getLongData());
                        break;
                    default:
                        System.out.println(String.format(Locale.ROOT, "Tensor<%s> with unknown data type: %d",
                                curTensor.tensorName(), curTensor.getDataType()));
                        break;
                }
                curTensor.free();
            }
        } finally {
            // BUG FIX: previously the feed tensors leaked when predict() failed,
            // because the early return skipped this loop.
            for (MSTensor curTensor : feedTensors) {
                curTensor.free();
            }
        }
    }

    /** @return the runner's input tensor templates (null before a successful init). */
    public List<MSTensor> getInputTensors() {
        return this.inputTensors;
    }

    /**
     * Releases the underlying runner. Safe to call even when init failed.
     * (Name keeps the original spelling "destory" for caller compatibility.)
     */
    public void destory() {
        if (this.runner != null) {
            this.runner.free();
        }
    }
}

/**
 * Demo driver: loads a MindSpore Lite model via {@link MSModel}, generates
 * zero-filled synthetic inputs for several batch sizes, and runs predictions.
 */
public class Main {

    /**
     * Builds a synthetic request for every model input: a zero-filled array of the
     * tensor's element type, sized as batch * product of the remaining shape axes.
     *
     * @param msModel an initialized model whose {@code inputTensors} describe the inputs
     * @param batch   batch size substituted for the first shape axis
     * @return map from tensor name to {@code Pair<batch, dataArray>}, the format
     *         {@link MSModel#calc(Map)} expects
     */
    public static Map<String, Object> GenerateData(MSModel msModel, int batch) {
        Map<String, Object> predictParam = new ConcurrentHashMap<>();
        for (MSTensor inputTensor : msModel.inputTensors) {
            int[] shape = inputTensor.getShape();
            int elementsNum = batch;
            for (int axis = 1; axis < shape.length; axis++) {
                elementsNum = elementsNum * shape[axis];
            }

            switch (inputTensor.getDataType()) {
                case kNumberTypeFloat:
                case DataType.kNumberTypeFloat32:
                    predictParam.put(inputTensor.tensorName(), Pair.of(batch, new float[elementsNum]));
                    break;
                case DataType.kNumberTypeInt:
                case DataType.kNumberTypeInt32:
                    predictParam.put(inputTensor.tensorName(), Pair.of(batch, new int[elementsNum]));
                    break;
                case DataType.kNumberTypeInt64:
                    predictParam.put(inputTensor.tensorName(), Pair.of(batch, new long[elementsNum]));
                    break;
                default:
                    // BUG FIX: was SLF4J-style "{}" inside String.format, which printed
                    // the placeholders literally instead of the values.
                    System.out.println(String.format(Locale.ROOT, "Tensor<%s> with unknown data type: %d",
                            inputTensor.tensorName(), inputTensor.getDataType()));
                    break;
            }
            // BUG FIX: int[].toString() printed the array identity (e.g. "[I@1b6d..");
            // Arrays.toString renders the actual shape.
            System.out.println("inputName : " + inputTensor.tensorName()
                    + ", inputShapeStr : " + Arrays.toString(shape)
                    + ", inputSize : " + elementsNum);
        }
        return predictParam;
    }

    public static void main(String[] args) {
        System.out.printf("Hello and welcome!\n");

        Map<String, String> props = new HashMap<>();
        props.put(MSModel.MS_CONFIG_THREAD_NUM, "4");
        props.put(MSModel.MS_CONFIG_CPU_WORKER_NUM, "3");
        // BUG FIX: bind mode is parsed with Integer.parseInt; "false" always threw a
        // (swallowed) NumberFormatException and fell back to the default. "0" means
        // no core binding, which is what the failed parse effectively produced.
        props.put(MSModel.MS_CONFIG_CPU_BIND_MODE, "0");
        props.put(MSModel.MS_CONFIG_CPU_ENABLE_FP16, "0");
        props.put(MSModel.MS_CONFIG_INTER_OP_PARALLEL_NUM, "1");
        props.put(MSModel.MS_CONFIG_RUNNER_CONFIG_PATH, "");

        // Default path is a developer-machine sample; override with the first CLI arg.
        String modelPath = "/home/greatpan/mymodel/browser_v50/browser_v50.pb.ms";
        if (args.length > 0) {
            modelPath = args[0];
        }
        System.out.printf("modelPath : " + modelPath + "\n");
        MSModel msModel = new MSModel();
        msModel.init(modelPath, props);

        Map<String, Object> predictParam50 = GenerateData(msModel, 50);
        Map<String, Object> predictParam90 = GenerateData(msModel, 90);
        Map<String, Object> predictParam150 = GenerateData(msModel, 150);
        Map<String, Object> predictParam200 = GenerateData(msModel, 200);

        // The original do { } while(false) executed exactly once; run the body directly.
        Map<String, Object> result1 = msModel.calc(predictParam50);
        Map<String, Object> result2 = msModel.calc(predictParam90);
        Map<String, Object> result3 = msModel.calc(predictParam150);
        Map<String, Object> result4 = msModel.calc(predictParam200);
        System.out.printf("The problem is loop\n");
    }
}
