package com.example.service;

import opennlp.tools.postag.POSModel;
import opennlp.tools.postag.POSTaggerME;
import opennlp.tools.tokenize.TokenizerME;
import opennlp.tools.tokenize.TokenizerModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;

//@Service
//public class NlpService {
//
//    private TokenizerME tokenizer;
//    private POSTaggerME posTagger;
//
//    private final ResourceLoader resourceLoader;
//
//    @Value("classpath:opennlp-en-ud-ewt-tokens-1.0-1.9.3.bin")
//    private String tokenizerModelPath;
//
//    @Value("classpath:opennlp-en-ud-ewt-pos-1.0-1.9.3.bin")
//    private String posModelPath;
//
//    public NlpService(ResourceLoader resourceLoader) {
//        this.resourceLoader = resourceLoader;
//    }
//
//    @PostConstruct
//    public void init() throws Exception {
//        loadModels(tokenizerModelPath, posModelPath);
//    }
//
//    public void loadModels(String tokenizerModelPath, String posModelPath) throws Exception {
//        Resource tokenizerResource = resourceLoader.getResource(tokenizerModelPath);
//        Resource posResource = resourceLoader.getResource(posModelPath);
//
//        try (InputStream tokenModelStream = tokenizerResource.getInputStream();
//             InputStream posModelStream = posResource.getInputStream()) {
//            TokenizerModel tokenModel = new TokenizerModel(tokenModelStream);
//            POSModel posModel = new POSModel(posModelStream);
//
//            this.tokenizer = new TokenizerME(tokenModel);
//            this.posTagger = new POSTaggerME(posModel);
//        }
//    }
//
//    public List<String> extractKeywords(String text) {
//        String[] tokens = tokenizer.tokenize(text);
//        String[] tags = posTagger.tag(tokens);
//
//        System.out.println("Tokens: " + Arrays.toString(tokens));
//        System.out.println("Tags: " + Arrays.toString(tags));
//
//
//        List<String> keywords = new ArrayList<>();
//        for (int i = 0; i < tokens.length; i++) {
//            if (tags[i].startsWith("NN")) {
//                keywords.add(tokens[i]);
//            }
//        }
//        // 打印提取的关键词，确保它们不为空
//        System.out.println("Extracted keywords: " + keywords);
//
//        return keywords;
//    }
//}




//@Service
//public class NlpService {
//
//    public List<String> extractKeywords(String text) {
//        List<String> keywords = new ArrayList<>();
//
//        // 创建一个分词对象
//        Segment segment = HanLP.newSegment().enablePartOfSpeechTagging(true); // 启用词性标注
//
//        // 执行分词
//        List<Term> termList = segment.seg(text);
//
//        // 提取名词作为关键词
//        for (Term term : termList) {
//            if (term.nature != null && term.nature.startsWith("n")) {
//                keywords.add(term.word);
//            }
//        }
//
//        return keywords;
//    }
//}

@Service
public class NlpService {

    /**
     * Segments the given text with HanLP and returns the resulting words.
     *
     * <p>Note: despite the name, this method currently performs no
     * part-of-speech filtering — every segmented token is returned. (An
     * earlier noun-only variant was abandoned because it often produced an
     * empty result; see file history.) If no tokens are produced at all,
     * the original input text is returned as the single "keyword" so that
     * callers never receive an empty list for non-empty input.
     *
     * @param text the text to segment; {@code null} or blank input yields
     *             an empty list
     * @return a mutable list of segmented words, or a single-element list
     *         containing {@code text} when segmentation yields nothing;
     *         never {@code null}
     */
    public List<String> extractKeywords(String text) {
        List<String> keywords = new ArrayList<>();

        // Guard: HanLP throws on null input, and blank text has no keywords.
        if (text == null || text.trim().isEmpty()) {
            return keywords;
        }

        // POS tagging is enabled so each Term carries a nature; it is not
        // currently used for filtering but kept for parity with earlier
        // versions and possible future noun-only extraction.
        Segment segment = HanLP.newSegment().enablePartOfSpeechTagging(true);

        List<Term> termList = segment.seg(text);

        // Collect every segmented word; nature is non-null for tagged terms,
        // so this effectively accepts all tokens.
        for (Term term : termList) {
            if (term.nature != null) {
                keywords.add(term.word);
            }
        }

        // Fallback: never return an empty list for non-blank input.
        if (keywords.isEmpty()) {
            keywords.add(text);
        }

        return keywords;
    }
}