package com.mo;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.io.IoUtil;
import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.classification.models.AbstractModel;
import lombok.Data;
import lombok.SneakyThrows;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * @author 墨渐生微
 * @program mo-all
 * @apiNote
 * @since 2023/5/15
 */
public class NlpClassifyDemo {


    static class NlpClassifier {

        /**
         * Trained model; {@code null} until {@link #train(String)} completes successfully.
         */
        private NlpModel nlpModel;

        /**
         * Builds a classification model from a directory tree of training corpora.
         * Each first-level sub-directory of {@code folderPath} becomes a top-level
         * category; nested directories become sub-categories, and every regular file
         * is read whole as one UTF-8 corpus document.
         *
         * @param folderPath root directory of the training data
         * @throws IllegalArgumentException if the path is null, missing, or not a directory
         * @throws IOException declared for API compatibility (reads go through hutool IoUtil)
         */
        public void train(String folderPath) throws IOException {
            if (folderPath == null) {
                throw new IllegalArgumentException("参数 folderPath == null");
            }

            File root = new File(folderPath);
            if (!root.exists()) {
                throw new IllegalArgumentException(String.format("目录 %s 不存在", root.getAbsolutePath()));
            }
            if (!root.isDirectory()) {
                throw new IllegalArgumentException(String.format("目录 %s 不是一个目录", root.getAbsolutePath()));
            }

            // listFiles() returns null on an I/O error; keep any previous model intact.
            File[] categoryDirs = root.listFiles();
            if (categoryDirs == null) {
                return;
            }

            List<ClassifyCorpus> classifiers = new ArrayList<>(categoryDirs.length);
            for (File categoryDir : categoryDirs) {
                // Only first-level directories define categories; stray files are skipped.
                if (categoryDir.isFile()) {
                    continue;
                }
                classifiers.add(recursiveReduce(categoryDir));
            }

            // Build completely before publishing; a local name distinct from the field
            // avoids the shadowing present in the original code.
            NlpModel model = new NlpModel();
            model.setClassifiers(classifiers);
            this.nlpModel = model;
        }

        /**
         * Recursively converts a directory into a {@link ClassifyCorpus}: regular
         * files are read whole as UTF-8 documents, sub-directories become children.
         */
        private ClassifyCorpus recursiveReduce(File rootDir) {
            ClassifyCorpus classifyModel = new ClassifyCorpus();
            classifyModel.setCatalog(rootDir.getName());

            List<String> corpus = new ArrayList<>();
            List<ClassifyCorpus> subset = new ArrayList<>();

            File[] files = rootDir.listFiles();
            if (files == null) {
                // Unreadable directory: return an empty category rather than failing.
                return classifyModel;
            }

            for (File file : files) {
                if (file.isFile()) {
                    // Each file contributes its ENTIRE contents as one corpus entry.
                    corpus.add(IoUtil.readUtf8(IoUtil.toStream(file)));
                    continue;
                }
                subset.add(recursiveReduce(file));
            }
            classifyModel.setCorpus(corpus);
            classifyModel.setSubset(subset);
            return classifyModel;
        }

        /**
         * @return the trained model, or {@code null} if {@link #train(String)} has not run
         */
        public NlpModel getModel() {
            return this.nlpModel;
        }

        /**
         * Classifies text by segmenting it and scoring every category tree, returning
         * the catalog name with the highest hit count.
         *
         * @param text text to classify
         * @return the best-matching category name
         * @throws IllegalStateException if the classifier has not been trained
         * @throws RuntimeException      if no category scores at all for the text
         */
        public String classify(String text) {
            // Fail fast with a clear message instead of the NullPointerException the
            // original threw when classify() was called before train().
            if (nlpModel == null || nlpModel.getClassifiers() == null) {
                throw new IllegalStateException("model is empty: call train() before classify()");
            }

            List<String> words = segment(text);

            List<ClassifyCorpus> scoreResult = new ArrayList<>();
            for (ClassifyCorpus classifier : nlpModel.getClassifiers()) {
                scoreResult.add(predict(classifier, words));
            }

            // Pick the category with the most hits; comparingInt avoids boxing.
            ClassifyCorpus maxScoreResult = scoreResult
                    .stream()
                    .filter(Objects::nonNull)
                    .max(Comparator.comparingInt(ClassifyCorpus::getHit))
                    .orElseThrow(() -> new RuntimeException("require check unknown classify: " + text));

            return maxScoreResult.getCatalog();
        }

        /**
         * Scores one category (recursively over its sub-categories) against the words.
         * May return {@code null} when a non-leaf category has no matching children.
         * Note: mutates the model's transient {@code hit} fields as a side effect, so
         * concurrent classify() calls on one instance are not safe.
         */
        private ClassifyCorpus predict(ClassifyCorpus classifier, List<String> words) {
            List<ClassifyCorpus> subset = classifier.getSubset();

            // Non-leaf category: score is derived from the children.
            if (CollectionUtil.isNotEmpty(subset)) {
                List<ClassifyCorpus> subsetResult = new ArrayList<>();
                for (ClassifyCorpus subsetClassify : subset) {
                    subsetResult.add(predict(subsetClassify, words));
                }

                // Keep only children that actually matched. The null filter fixes an
                // NPE in the original: recursive calls can return null (no matching
                // grandchildren) and o.getHit() was invoked on it unconditionally.
                subsetResult = subsetResult
                        .stream()
                        .filter(Objects::nonNull)
                        .filter(o -> o.getHit() != 0)
                        .collect(Collectors.toList());

                // Several children matched: attribute the combined score to the parent.
                if (subsetResult.size() > 1) {
                    int hit = subsetResult.stream().mapToInt(ClassifyCorpus::getHit).sum();
                    classifier.setHit(hit);
                    return classifier;
                }

                // Exactly one child matched -> that child; none -> null (filtered upstream).
                return CollectionUtil.getFirst(subsetResult);
            }

            // Leaf category: score directly against its own corpus.
            classifier.setHit(hit(words, classifier.getCorpus()));
            return classifier;
        }

        /**
         * Counts the words that appear as exact entries of the corpus list.
         *
         * NOTE(review): corpus entries are whole file contents (see recursiveReduce),
         * so {@code corpus.contains(word)} performs exact string equality against an
         * entire document — a word only hits when a training file consists of exactly
         * that word. If training files hold multi-word documents, substring matching
         * was probably intended; confirm against the training data before changing.
         */
        private int hit(List<String> words, List<String> corpus) {
            int count = 0;
            for (String word : words) {
                if (corpus.contains(word)) {
                    count++;
                }
            }
            return count;
        }

        /**
         * Segments text with HanLP and drops noise parts of speech:
         * u = particle, r = pronoun, p = preposition, c = conjunction,
         * w = punctuation, m = numeral.
         *
         * @param text raw text
         * @return remaining words, in segmentation order
         */
        public List<String> segment(String text) {
            return HanLP.newSegment()
                    .seg(text.toCharArray())
                    .stream()
                    .filter(term -> !(term.nature.startsWith("u")
                            || term.nature.startsWith("r")
                            || term.nature.startsWith("p")
                            || term.nature.startsWith("c")
                            || term.nature.startsWith("w")
                            || term.nature.startsWith("m")))
                    .map(term -> term.word)
                    .collect(Collectors.toList());
        }
    }

    /**
     * Container for the trained classifier hierarchy. Extends HanLP's
     * {@code AbstractModel} — presumably so it can be persisted through HanLP's
     * model serialization machinery, TODO confirm; no save/load is exercised here.
     * Getters/setters/equals/hashCode/toString are generated by Lombok's @Data.
     */
    @Data
    static class NlpModel extends AbstractModel {
        // One entry per top-level category directory seen during training.
        // Package-private on purpose: classify() reads this field directly.
        List<ClassifyCorpus> classifiers;
    }


    /**
     * One node of the category tree built from the training directory structure.
     * Getters/setters/equals/hashCode/toString are generated by Lombok's @Data;
     * all outside access goes through those accessors.
     */
    @Data
    static class ClassifyCorpus {

        /**
         * Category name (taken from the training directory name).
         */
        private String catalog;

        /**
         * Corpus documents: each entry is the full UTF-8 text of one training file.
         */
        private List<String> corpus;

        /**
         * Child categories (sub-directories); empty for leaf categories.
         * Made private for consistency with the other fields — callers already use
         * the Lombok-generated getSubset()/setSubset() exclusively.
         */
        private List<ClassifyCorpus> subset;

        /**
         * Hit count from the most recent classification; transient so it is not
         * carried along if the model is serialized.
         */
        private transient int hit = 0;
    }

    /**
     * Demo entry point: currently exercises only the segmenter. The training and
     * classification calls are kept below as usage examples — the corpus path is
     * machine-specific.
     */
    @SneakyThrows
    public static void main(String[] args) {
        NlpClassifier demo = new NlpClassifier();
//        demo.train("D:\\hanlp\\data\\train\\nlp-classifier");
//
//        System.out.println(demo.classify("'chic-her.com'网站是一个中国时尚女装网站，销售各种类型的女装。它销售的商品包括长裙、裤装、套装、连衣裙、外套、卫衣和T恤等。"));
//        System.out.println(demo.classify("男装"));
//
//        System.out.println(demo.classify("彩妆化妆小工具"));

        List<String> tokens = demo.segment("通过搜索 'lichenous.shop' 这个网站，可以发现该网站是一家线上商店，主要销售户外运动装备、配饰和工具等相关产品。其产品涵盖登山、露营、越野、钓鱼、滑雪等多个领域，覆盖了很广的户外爱好人士群体。");
        System.out.println(tokens);
    }
}
