package com.leolong.example;

import ai.djl.MalformedModelException;
import ai.djl.ModelException;
import ai.djl.inference.Predictor;
import ai.djl.modality.nlp.DefaultVocabulary;
import ai.djl.modality.nlp.Vocabulary;
import ai.djl.modality.nlp.bert.BertFullTokenizer;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.repository.zoo.Criteria;
import ai.djl.repository.zoo.ModelNotFoundException;
import ai.djl.repository.zoo.ZooModel;
import ai.djl.training.util.ProgressBar;
import ai.djl.translate.NoBatchifyTranslator;
import ai.djl.translate.TranslateException;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.Utils;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.URL;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;

/**
 * Created by zhong on 2023/3/29.
 */
public class ErnieClassification {
    private ErnieClassification() {}

    /**
     * Entry point: classifies a hard-coded pair of company names and prints the
     * predicted class index to stdout.
     *
     * @param args command-line arguments (unused)
     * @throws IOException if the model archive or vocabulary cannot be read
     * @throws ModelException if the model cannot be located or loaded
     * @throws TranslateException if inference fails
     */
    public static void main(String[] args) throws IOException, ModelException, TranslateException {
        // Point DJL's model and engine caches at a local directory.
        System.setProperty("DJL_CACHE_DIR", "D:\\djl\\cache");
        System.setProperty("ENGINE_CACHE_DIR", "D:\\djl\\cache");

        List<String> sentencePair = new ArrayList<>(
                Arrays.asList("山东博远医药有限公司", "上海博远"));

        System.out.print(predict(sentencePair));
    }

    /**
     * Loads the PaddlePaddle ERNIE classification model bundled on the classpath
     * and runs a single prediction on the given sentence pair.
     *
     * <p>See
     * https://medium.com/delvify/bert-rest-inference-from-the-fine-tuned-model-499997b32851
     * and https://github.com/google-research/bert for converting public BERT
     * checkpoints to saved-model format.
     *
     * @param inputs the two text segments to classify (segment A at index 0, segment B at index 1)
     * @return the predicted class index (0 or 1)
     * @throws MalformedModelException if the model archive is corrupt
     * @throws ModelNotFoundException if no model matches the criteria
     * @throws IOException if the model or vocabulary cannot be read
     * @throws TranslateException if inference fails
     */
    public static Integer predict(List<String> inputs)
            throws MalformedModelException, ModelNotFoundException, IOException,
            TranslateException {
        URL vocabURL = ErnieClassification.class.getResource("/model/0.1/vocab.txt");

        Criteria<String[], Integer> criteria =
                Criteria.builder()
                        .setTypes(String[].class, Integer.class)
                        .optModelUrls("jar:///model/0.1/inference-0.1.zip")
                        .optTranslator(new MyTranslator(vocabURL, 128))
                        .optEngine("PaddlePaddle")
                        .optProgress(new ProgressBar())
                        .build();

        // try-with-resources releases the native model and predictor handles.
        try (ZooModel<String[], Integer> model = criteria.loadModel();
             Predictor<String[], Integer> predictor = model.newPredictor()) {
            return predictor.predict(inputs.toArray(Utils.EMPTY_ARRAY));
        }
    }

    private static final class MyTranslator implements NoBatchifyTranslator<String[], Integer> {
        // Candidate labels; only consumed by the (currently unreferenced)
        // tokenizeSingleString helper.
        private final List<String> classes = Arrays.asList("class1", "class2");
        // Built lazily in prepare(); only consumed by unreferenced helpers.
        private BertFullTokenizer tokenizer;
        private final int maxSequenceLength;
        // Exactly one of the two vocabulary sources is non-null.
        private final String vocabularyPath;
        private final URL vocabularyURL;
        // Loaded lazily in prepare().
        private DefaultVocabulary vocabulary;

        /** Creates a translator that loads its vocabulary from a file-system path. */
        MyTranslator(String vocabularyPath, int maxSequenceLength) {
            this(vocabularyPath, null, maxSequenceLength);
        }

        /** Creates a translator that loads its vocabulary from a URL (e.g. classpath). */
        MyTranslator(URL vocabularyURL, int maxSequenceLength) {
            this(null, vocabularyURL, maxSequenceLength);
        }

        /** Canonical constructor; callers supply exactly one vocabulary source. */
        private MyTranslator(String vocabularyPath, URL vocabularyURL, int maxSequenceLength) {
            this.vocabularyPath = vocabularyPath;
            this.vocabularyURL = vocabularyURL;
            this.maxSequenceLength = maxSequenceLength;
        }

        /**
         * Loads the vocabulary (from either the configured path or URL) and builds
         * the tokenizer once, before the first prediction.
         *
         * @param ctx the translator context (unused)
         * @throws IOException if the vocabulary file cannot be read
         */
        @Override
        public void prepare(TranslatorContext ctx) throws IOException {
            DefaultVocabulary.Builder builder = DefaultVocabulary.builder();
            if (StringUtils.isNotBlank(this.vocabularyPath)) {
                builder.addFromTextFile(Paths.get(this.vocabularyPath));
            } else {
                builder.addFromTextFile(vocabularyURL);
            }
            // Map out-of-vocabulary tokens to [UNK] instead of failing.
            vocabulary = builder.optUnknownToken("[UNK]").build();
            tokenizer = new BertFullTokenizer(vocabulary, true);
        }

        /**
         * Encodes a sentence pair into the two model inputs: token ids and segment
         * (token-type) ids, each shaped {@code (1, seqLen)}.
         *
         * @param ctx supplies the {@link NDManager} used to allocate the arrays
         * @param inputs exactly two text segments; index 0 is segment A, index 1 is segment B
         * @return an {@link NDList} of [tokenIds, tokenTypeIds]
         * @throws IOException declared for interface compatibility; not thrown here
         * @throws IllegalArgumentException if fewer than two segments are supplied
         */
        @Override
        public NDList processInput(TranslatorContext ctx, String[] inputs) throws IOException {
            // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
            if (inputs == null || inputs.length < 2) {
                throw new IllegalArgumentException(
                        "expected a sentence pair, got "
                                + (inputs == null ? 0 : inputs.length) + " segment(s)");
            }
            NDManager inputManager = ctx.getNDManager();

            List<Long> ids = new ArrayList<>();
            List<Long> types = new ArrayList<>();
            buildParams(inputs[0], ids, types, 0L, vocabulary);
            buildParams(inputs[1], ids, types, 1L, vocabulary);

            // Unbox in one pass instead of the previous index-based copy loop.
            long[] idsArr = ids.stream().mapToLong(Long::longValue).toArray();
            long[] typeArr = types.stream().mapToLong(Long::longValue).toArray();

            NDList params = new NDList();
            params.add(inputManager.create(idsArr, new Shape(1, idsArr.length)));
            params.add(inputManager.create(typeArr, new Shape(1, typeArr.length)));
            return params;
        }

        /**
         * Appends the character-level encoding of one text segment to the running
         * token-id and token-type lists.
         *
         * @param input the text segment to encode
         * @param idList accumulator for token ids
         * @param typeList accumulator for token-type (segment) ids
         * @param type 0 for segment A, 1 for segment B
         * @param vocabulary maps characters to token ids
         */
        private void buildParams(String input, List<Long> idList, List<Long> typeList, Long type, Vocabulary vocabulary) {
            // Segment A opens the whole sequence with id 1 — presumably [CLS] in
            // the ERNIE vocab; confirm against vocab.txt.
            if (type.longValue() == 0L) {
                idList.add(1L);
                typeList.add(type);
            }
            // Character-level tokenization: one token per character. Guard against
            // empty input — "".split("") returns [""], which previously injected a
            // spurious [UNK] token for an empty segment.
            if (!input.isEmpty()) {
                for (String inputToken : input.split("")) {
                    long index = vocabulary.contains(inputToken)
                            ? vocabulary.getIndex(inputToken)
                            : vocabulary.getIndex("[UNK]");
                    idList.add(index);
                    typeList.add(type);
                }
            }
            // Every segment is terminated with id 2 — presumably [SEP]; confirm.
            idList.add(2L);
            typeList.add(type);
        }

        /**
         * Stacks the {@code index}-th array of each tokenized example into a single
         * batched array and tags it with the model's input name.
         *
         * <p>NOTE(review): currently unreferenced within this class — presumably
         * left over from a batching pipeline; confirm before deleting.
         *
         * @param tokenizedInputs one NDList of feature arrays per example
         * @param index which array of each NDList to stack
         * @param inputName the name the engine expects for this input
         * @return the stacked, named array with a leading batch dimension
         */
        private NDArray stackInputs(List<NDList> tokenizedInputs, int index, String inputName) {
            NDArray stacked =
                    NDArrays.stack(
                            tokenizedInputs.stream()
                                    .map(list -> list.get(index).expandDims(0))
                                    .collect(Collectors.toCollection(NDList::new)));
            // Dropped the unused local `Shape shape = stacked.getShape();`.
            stacked.setName(inputName);
            return stacked;
        }

        /**
         * Converts one TSV training line ("label\ttext") into classic BERT features:
         * token ids, attention mask, segment ids, and the numeric label id.
         *
         * <p>NOTE(review): currently unreferenced within this class — presumably
         * kept from the fine-tuning pipeline; confirm before deleting.
         *
         * @param manager allocator for the returned arrays
         * @param input a single "label\ttext" line
         * @return an NDList of [inputIds, inputMask, segmentIds, labelId]
         */
        private NDList tokenizeSingleString(NDManager manager, String input) {
            // inputs[0] = label, inputs[1] = text.
            String[] inputs = input.split("\t");
            // Map each class name to its ordinal id (class1 -> 0, class2 -> 1).
            ConcurrentHashMap<String, Long> labelMap = new ConcurrentHashMap<String, Long>();
            for (int i = 0; i < classes.size(); i++) {
                labelMap.put(classes.get(i), (long) i);
            }
            List<String> tokensA = tokenizer.tokenize(inputs[1]);
            // Truncate to leave room for the [CLS] and [SEP] markers below.
            if (tokensA.size() > maxSequenceLength - 2) {
                tokensA = tokensA.subList(0, maxSequenceLength - 2);
            }

            // Assemble "[CLS] tokens... [SEP]", all in segment 0 (single-sentence input).
            List<String> tokens = new ArrayList<String>();
            List<Long> segmentIds = new ArrayList<Long>();
            tokens.add("[CLS]");
            segmentIds.add(0L);
            for (String token : tokensA) {
                tokens.add(token);
                segmentIds.add(0L);
            }
            tokens.add("[SEP]");
            segmentIds.add(0L);
            List<Long> inputIds = new ArrayList<Long>();
            List<Long> inputMask = new ArrayList<Long>();

            // Real tokens get mask 1; padding added below gets mask 0.
            for (String token : tokens) {
                inputIds.add(tokenizer.getVocabulary().getIndex(token));
                inputMask.add(1L);
            }
            // Right-pad all three sequences to the fixed model length.
            while (inputIds.size() < maxSequenceLength) {
                inputIds.add(0L);
                inputMask.add(0L);
                segmentIds.add(0L);
            }
            // NOTE(review): labelId is null when inputs[0] is not in `classes`, which
            // makes manager.create(labelId) below throw an NPE — verify callers.
            Long labelId = labelMap.get(inputs[0]);
            NDList outputList = new NDList();
            outputList.add(manager.create(inputIds.stream().mapToLong(l -> l).toArray()));
            outputList.add(manager.create(inputMask.stream().mapToLong(l -> l).toArray()));
            outputList.add(manager.create(segmentIds.stream().mapToLong(l -> l).toArray()));
            outputList.add(manager.create(labelId));

            return outputList;
        }

        /**
         * Converts the model's two raw logits into a class index.
         *
         * <p>The previous implementation computed a softmax via {@code Math.exp} and
         * {@code new BigDecimal(double)}: for logits above ~709, {@code Math.exp}
         * overflows to Infinity and the BigDecimal constructor throws
         * {@link NumberFormatException}. Softmax is monotonic, so "P(class 1) > 0.5"
         * is equivalent to comparing the raw logits directly; this is exact, cannot
         * overflow, and preserves the original tie-breaking to class 0.
         *
         * @param ctx the translator context (unused)
         * @param list the model output; a single array whose first two entries are
         *     the class-0 and class-1 logits
         * @return 0 or 1
         */
        @Override
        public Integer processOutput(TranslatorContext ctx, NDList list) {
            float[] logits = list.singletonOrThrow().toFloatArray();
            return logits[1] > logits[0] ? 1 : 0;
        }
    }
}
