package com.moon.back.service.impl.handler;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.corpus.dependency.CoNll.CoNLLSentence;
import com.hankcs.hanlp.corpus.dependency.CoNll.CoNLLWord;
import com.hankcs.hanlp.dependency.IDependencyParser;
import com.hankcs.hanlp.dependency.nnparser.NeuralNetworkDependencyParser;
import com.hankcs.hanlp.seg.common.Term;
import com.hankcs.hanlp.tokenizer.StandardTokenizer;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Parses a Chinese natural-language command (e.g. "删除名字包含test的文件夹")
 * into a structured map describing the operation, its target type, and any
 * modifier conditions, using HanLP tokenization plus neural-network
 * dependency parsing.
 *
 * <p>Utility class — all members are static; not instantiable.
 */
public class CommandParser {

    // Model locations are read from system properties so deployments are not
    // tied to one machine; the defaults preserve the original hard-coded paths.
    static {
        HanLP.Config.NNParserModelPath = System.getProperty("moon.model.nnparser",
                "D:\\bishe\\moon_back\\moon_db\\data\\model\\dependency\\NNParserModel.txt");
        HanLP.Config.PerceptronCWSModelPath = System.getProperty("moon.model.cws",
                "D:\\bishe\\moon_back\\moon_db\\data\\model\\perceptron\\large\\cws.bin");
        HanLP.Config.PerceptronPOSModelPath = System.getProperty("moon.model.pos",
                "D:\\bishe\\moon_back\\moon_db\\data\\model\\perceptron\\pku1998\\pos.bin");
        HanLP.Config.PerceptronNERModelPath = System.getProperty("moon.model.ner",
                "D:\\bishe\\moon_back\\moon_db\\data\\model\\perceptron\\pku1998\\ner.bin");
    }

    /** Commands longer than this are rejected by {@link #main} as too complex. */
    private static final int MAX_COMMAND_LENGTH = 50;

    /** Shared dependency parser instance, initialized once at class load. */
    private static final IDependencyParser parser = new NeuralNetworkDependencyParser();

    /** Utility class — prevent instantiation. */
    private CommandParser() {
    }

    /**
     * Demo entry point: parses one hard-coded command and prints the result.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String text = "删除名字包含test的文件夹";

        // Defensive check: refuse overly long input before invoking the parser.
        if (text.length() > MAX_COMMAND_LENGTH) {
            System.out.println("无法解析，指令太复杂 ^~^");
            return;
        }

        Map<String, Object> result = parseCommand(text);
        System.out.println("解析结果：\n" + formatResult(result));
    }

    /**
     * Parses a command into its structured representation.
     *
     * @param text the raw command text
     * @return a map with key {@code "operation"} (the main verb, a String) and
     *         key {@code "target"} (a nested map with {@code "type"} — the
     *         patient noun — and {@code "conditions"} — a list of modifier
     *         phrases)
     */
    public static Map<String, Object> parseCommand(String text) {
        List<Term> terms = StandardTokenizer.segment(text);
        CoNLLSentence sentence = parser.parse(terms);

        Map<String, Object> target = new HashMap<>();
        target.put("type", extractObject(sentence));
        target.put("conditions", extractConditions(sentence));

        Map<String, Object> result = new HashMap<>();
        result.put("operation", extractVerb(terms, sentence));
        result.put("target", target);
        return result;
    }

    /**
     * Returns the surface form of the first verb in the sentence.
     *
     * <p>HanLP 1.8.x exposes the part-of-speech tag as the String field
     * {@code POSTAG}; {@code "VV"} marks verbs in this tag set.
     *
     * @param terms    the segmented tokens the sentence was parsed from
     * @param sentence the dependency parse of {@code terms}
     * @return the first verb's word, or {@code "未知操作"} if none is found
     */
    private static String extractVerb(List<Term> terms, CoNLLSentence sentence) {
        for (CoNLLWord word : sentence.getWordArray()) {
            if ("VV".equals(word.POSTAG)) {
                int index = word.ID - 1; // CoNLL word IDs are 1-based
                if (index >= 0 && index < terms.size()) {
                    return terms.get(index).word;
                }
                // Defensive fallback: word array and token list diverged.
                return word.LEMMA;
            }
        }
        return "未知操作";
    }

    /**
     * Returns the lemma of the first word whose dependency relation is
     * {@code "受事"} (patient / direct object), or {@code "未知目标"} if absent.
     */
    private static String extractObject(CoNLLSentence sentence) {
        for (CoNLLWord word : sentence.getWordArray()) {
            if ("受事".equals(word.DEPREL)) {
                return word.LEMMA;
            }
        }
        return "未知目标";
    }

    /**
     * Collects condition phrases: for each word whose dependency relation
     * starts with {@code "修饰"} (modifier), the head word's lemma concatenated
     * with the modifier's lemma.
     *
     * @return the (possibly empty) list of condition phrases
     */
    private static List<String> extractConditions(CoNLLSentence sentence) {
        List<String> conditions = new ArrayList<>();
        for (CoNLLWord word : sentence.getWordArray()) {
            if (word.DEPREL != null && word.DEPREL.startsWith("修饰")) {
                conditions.add(word.HEAD.LEMMA + word.LEMMA);
            }
        }
        return conditions;
    }

    /**
     * Renders a {@link #parseCommand} result as a human-readable multi-line
     * string (operation, target type, then one line per condition).
     */
    private static String formatResult(Map<String, Object> result) {
        StringBuilder sb = new StringBuilder();
        sb.append("操作类型: ").append(result.get("operation")).append("\n");
        Map<?, ?> target = (Map<?, ?>) result.get("target");
        sb.append("目标类型: ").append(target.get("type")).append("\n");
        sb.append("条件: \n");
        ((List<?>) target.get("conditions")).forEach(cond -> sb.append(" - ").append(cond).append("\n"));
        return sb.toString();
    }
}