package com.thd.kc.service;

import com.kennycason.kumo.CollisionMode;
import com.kennycason.kumo.WordCloud;
import com.kennycason.kumo.WordFrequency;
import com.kennycason.kumo.bg.PixelBoundryBackground;
import com.kennycason.kumo.font.KumoFont;
import com.kennycason.kumo.font.scale.LinearFontScalar;
import com.kennycason.kumo.nlp.FrequencyAnalyzer;
import com.kennycason.kumo.palette.ColorPalette;
import com.thd.kc.bean.Pair;
import com.thd.kc.constants.DictClassifyEnum;
import com.thd.kc.dao.JdbcDao;
import com.thd.kc.dto.ArticleDto;
import com.thd.kc.dto.AttachDto;
import com.thd.kc.utils.*;
import com.thd.kc.vuedto.VueSelectDto;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.awt.*;
import java.io.File;
import java.io.StringReader;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.List;
import java.util.stream.Collectors;

@Service
public class KcServiceImpl implements KcService {

    public Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private JdbcDao jdbcDao;



    /**
     * Converts one query-result row (column alias -> value map) into an {@link ArticleDto}.
     * Relies on {@link BeanUtils#copyProperties} matching the map keys to DTO property names,
     * which is why the SELECTs alias snake_case columns to camelCase.
     *
     * @param mapItem one row of a kc_article query
     * @return the populated DTO
     * @throws RuntimeException if property copying fails; the original cause is preserved
     */
    private ArticleDto transformToPmpItem(Map<String, Object> mapItem) {
        ArticleDto item = new ArticleDto();
        try {
            BeanUtils.copyProperties(item, mapItem);
        } catch (Exception e) {
            // Keep the underlying cause instead of discarding it (the original threw
            // a bare RuntimeException, losing the stack of the real failure).
            throw new RuntimeException("Transform Map To ArticleDto error", e);
        }
        return item;
    }

    /**
     * Inserts a new article row and indexes it for full-text search.
     * The id and both timestamps are generated here, overwriting whatever the
     * caller supplied.
     *
     * @param articleDto the article to persist
     * @return the same DTO, now carrying the generated id and timestamps
     */
    @Override
    public ArticleDto saveArticle(ArticleDto articleDto) {
        // Generate the primary key.
        articleDto.setId(UuidUtils.uuid());

        // Format a single instant so create_time and modify_time are guaranteed
        // equal (the original called new Date() twice and could straddle a second).
        String now = DateUtils.dateToString(new Date(), "yyyy-MM-dd HH:mm:ss");
        articleDto.setCreateTime(now);
        articleDto.setModifyTime(now);

        String sql = " insert into kc_article " +
                "(id,title,keywords,classify,tags,content,create_time,modify_time,is_delete) " +
                " values " +
                "(?,?,?,?,?,?,?,?,?)";

        jdbcDao.execute(sql, new Object[]{
                articleDto.getId(),
                articleDto.getTitle(),
                articleDto.getKeywords(),
                articleDto.getClassify(),
                articleDto.getTags(),
                articleDto.getContent(),
                articleDto.getCreateTime(),
                articleDto.getModifyTime(),
                0 // is_delete: new rows are live
        });

        // Keep the Lucene index in sync with the database.
        this.indexArticle(articleDto);

        return articleDto;
    }


    /**
     * Splits a comma-separated dict value list and ensures each entry exists in
     * kc_dict under the given classification. A null/empty list is a no-op.
     *
     * @param dictClassify dict classification key (e.g. TAG)
     * @param dicts        comma-separated dict values
     */
    private void dealDictBatch(String dictClassify, String dicts) {
        if (StringUtils.isEmpty(dicts)) {
            return;
        }
        for (String dict : dicts.split(",")) {
            this.dealDict(dictClassify, dict);
        }
    }

    /**
     * Inserts the dict value into kc_dict if it is not already present for the
     * given classification. Both id and title are the raw value itself.
     *
     * @param dictClassify dict classification key
     * @param dict         the dict value to register
     */
    private void dealDict(String dictClassify, String dict) {
        String countSql = "select count(1) as ct from kc_dict where classify = ? and id = ? ";
        List<Map<String, Object>> rows = this.jdbcDao.query(countSql, new Object[]{dictClassify, dict});

        boolean exists = ListUtils.isNotEmpty(rows)
                && Integer.parseInt(rows.get(0).get("ct").toString()) > 0;

        if (!exists) {
            this.jdbcDao.execute(" insert into kc_dict(id,title,classify) values (?,?,?)",
                    new Object[]{dict, dict, dictClassify});
        }
    }

    /**
     * Updates an existing article row (matched by id) and refreshes its
     * full-text index entry. modify_time is stamped here; create_time is
     * written back from the DTO as-is.
     *
     * @param articleDto article carrying the new field values; id is mandatory
     * @return the same DTO with modifyTime refreshed
     */
    @Override
    public ArticleDto updateArticle(ArticleDto articleDto) {
        AssertUtils.stringIsNotEmpty(articleDto.getId(), "id can not be empty");

        articleDto.setModifyTime(DateUtils.dateToString(new Date(), "yyyy-MM-dd HH:mm:ss"));

        String sql = " update kc_article " +
                " set " +
                " title = ? , " +
                " keywords = ? ," +
                " classify = ? , " +
                " tags = ? , " +
                " content = ? , " +
                " create_time = ? , " +
                " modify_time = ? " +
                " where " +
                " id = ? ";

        Object[] params = new Object[]{
                articleDto.getTitle(),
                articleDto.getKeywords(),
                articleDto.getClassify(),
                articleDto.getTags(),
                articleDto.getContent(),
                articleDto.getCreateTime(),
                articleDto.getModifyTime(),
                articleDto.getId()
        };
        jdbcDao.execute(sql, params);

        // Keep the Lucene index in sync with the database.
        this.indexArticle(articleDto);
        return articleDto;
    }

    /**
     * Loads a single article by primary key. Note: no is_delete filter here,
     * so soft-deleted rows are still retrievable by id.
     *
     * @param id article id
     * @return the article, or null when no row matches
     */
    @Override
    public ArticleDto queryArticleById(String id) {
        String sql = " select " +
                " id as id, " +
                " title as title, " +
                " keywords as keywords, " +
                " classify as classify, " +
                " tags as tags, " +
                " content as content, " +
                " create_time as createTime, " +
                " modify_time as modifyTime " +
                " from kc_article " +
                " where id = ?";
        List<Map<String, Object>> rows = jdbcDao.query(sql, new Object[]{id});
        if (rows.isEmpty()) {
            return null;
        }
        return transformToPmpItem(rows.get(0));
    }

    /**
     * Persists an article: inserts when it has no id yet, otherwise updates.
     * Any tags used on the article are registered as TAG dict entries first.
     *
     * @param articleDto the article to save
     * @return the persisted DTO (with generated id/timestamps on insert)
     */
    @Override
    public ArticleDto saveOrUpdateArticle(ArticleDto articleDto) {
        // Make sure every tag on the article exists in the dict table.
        this.dealDictBatch(DictClassifyEnum.TAG.name(), articleDto.getTags());

        return StringUtils.isEmpty(articleDto.getId())
                ? this.saveArticle(articleDto)
                : this.updateArticle(articleDto);
    }

    /**
     * Builds the parameterized SELECT for article list queries from the
     * optional filter fields on the DTO. All user input is bound through
     * placeholders — never concatenated — so the query is injection-safe.
     *
     * @param articleDto filter holder; empty fields are skipped
     * @return the SQL text paired with its positional parameters
     */
    private Pair<String, List<Object>> createArticleQuerySql(ArticleDto articleDto) {
        StringBuilder sql = new StringBuilder(
                " select " +
                " id as id, " +
                " title as title, " +
                " keywords as keywords, " +
                " classify as classify, " +
                " tags as tags, " +
                " content as content, " +
                " create_time as createTime, " +
                " modify_time as modifyTime " +
                " from kc_article " +
                " where is_delete = 0 ");
        List<Object> params = new ArrayList<Object>();

        if (StringUtils.isNotEmpty(articleDto.getId())) {
            sql.append(" and id = ? ");
            params.add(articleDto.getId());
        }
        if (StringUtils.isNotEmpty(articleDto.getSearchTags())) {
            sql.append(" and tags like ? ");
            params.add("%" + articleDto.getSearchTags() + "%");
        }
        if (StringUtils.isNotEmpty(articleDto.getTitle())) {
            sql.append(" and title like ? ");
            params.add("%" + articleDto.getTitle() + "%");
        }
        if (StringUtils.isNotEmpty(articleDto.getKeywords())) {
            sql.append(" and keywords like ? ");
            params.add("%" + articleDto.getKeywords() + "%");
        }
        if (StringUtils.isNotEmpty(articleDto.getClassify())) {
            sql.append(" and classify like ? ");
            params.add("%" + articleDto.getClassify() + "%");
        }
        if (StringUtils.isNotEmpty(articleDto.getContent())) {
            sql.append(" and content like ? ");
            params.add("%" + articleDto.getContent() + "%");
        }
        // Free-text keyword spans title/keywords/content. Use isNotEmpty like
        // every other filter (the original only null-checked, so an empty kw
        // added a useless "%%" clause).
        if (StringUtils.isNotEmpty(articleDto.getKw())) {
            sql.append(" and  ( title like ? or keywords like ? or content like ? ) ");
            String like = "%" + articleDto.getKw() + "%";
            params.add(like);
            params.add(like);
            params.add(like);
        }
        sql.append(" order by create_time desc ");

        return new Pair<String, List<Object>>(sql.toString(), params);
    }
    /**
     * Queries the article list from the database, applying limit/offset
     * pagination when currentPage is non-negative. pageSize falls back to 3
     * when missing or non-positive.
     *
     * @param articleDto filter + pagination holder
     * @return matching articles, newest first; never null
     */
    @Override
    public List<ArticleDto> queryArticleListByDb(ArticleDto articleDto) {
        Pair<String, List<Object>> querySpec = this.createArticleQuerySql(articleDto);
        String sql = querySpec.getLeft();
        List<Object> params = querySpec.getRight();

        // NOTE(review): if getCurrentPage() returns a nullable Integer this
        // comparison can NPE on unboxing — confirm against ArticleDto.
        if (articleDto.getCurrentPage() >= 0) {
            sql += " limit ? offset ? ";
            if (null == articleDto.getPageSize() || articleDto.getPageSize() <= 0) {
                articleDto.setPageSize(3); // default page size
            }
            params.add(articleDto.getPageSize());
            params.add(articleDto.getCurrentPage() * articleDto.getPageSize());
        }

        List<Map<String, Object>> rows = jdbcDao.query(sql, params.toArray());
        return ListUtils.isNotEmpty(rows)
                ? rows.stream().map(this::transformToPmpItem).collect(Collectors.toList())
                : new ArrayList<ArticleDto>();
    }


    /**
     * Queries the article list via the Lucene full-text index, then loads
     * each hit's full row from the database by its stored id.
     *
     * @param articleDto search criteria (kw, searchTags, classify)
     * @return matched articles in Lucene score order; empty list on failure
     */
    public List<ArticleDto> queryArticleListByLucene(ArticleDto articleDto) {
        List<ArticleDto> articles = new ArrayList<ArticleDto>();

        IndexReader indexReader = null;
        try {
            indexReader = DirectoryReader.open(LuceneUtils.INDEX_DIRECTORY);
            IndexSearcher indexSearcher = new IndexSearcher(indexReader);

            ScoreDoc[] scoreDocs = createQueryByLucene(articleDto, indexSearcher);
            logger.info("Lucene query returned {} hits", scoreDocs.length);

            for (ScoreDoc scoreDoc : scoreDocs) {
                // The index document only stores the article id; the full
                // article is fetched from the database.
                Document document = indexSearcher.doc(scoreDoc.doc);
                articles.add(this.queryArticleById(document.get("id")));
            }
        } catch (Exception e) {
            logger.error("Lucene article query failed", e);
        } finally {
            // Close exactly once, here in finally (the original also closed
            // inside the try, closing the reader twice on the success path).
            if (indexReader != null) {
                try {
                    indexReader.close();
                } catch (Exception e) {
                    logger.warn("Failed to close IndexReader", e);
                }
            }
        }
        return articles;
    }

    /**
     * Counts the articles matching the same filters as {@link #queryArticleListByDb}
     * (without pagination).
     *
     * @param articleDto filter holder
     * @return total row count; 0 when the count query yields nothing
     */
    @Override
    public Integer queryArticleListTotalByDb(ArticleDto articleDto) {
        Pair<String, List<Object>> pair = this.createArticleQuerySql(articleDto);
        String sql = pair.getLeft();
        List<Object> params = pair.getRight();

        // Alias the derived table: MySQL rejects an unaliased subquery in FROM
        // ("every derived table must have its own alias"); harmless elsewhere.
        String countSql = "select count(1) as ct from ( " + sql + " ) t ";

        List<Map<String, Object>> result = jdbcDao.query(countSql, params.toArray());
        if (ListUtils.isEmpty(result)) {
            return 0;
        }
        return Integer.parseInt(result.get(0).get("ct").toString());
    }



    /**
     * Counts articles matching the Lucene query. NOTE: the underlying search
     * fetches at most 10 hits (see createQueryByLucene), so the count is
     * capped accordingly.
     *
     * @param articleDto search criteria
     * @return hit count, or 0 when the search fails
     */
    @Override
    public Integer queryArticleListTotalByLucene(ArticleDto articleDto) {
        IndexReader indexReader = null;
        try {
            indexReader = DirectoryReader.open(LuceneUtils.INDEX_DIRECTORY);
            IndexSearcher indexSearcher = new IndexSearcher(indexReader);

            ScoreDoc[] scoreDocs = createQueryByLucene(articleDto, indexSearcher);
            logger.info("Lucene count query returned {} hits", scoreDocs.length);

            // BUG FIX: the original had "return new Integer(0)" inside the
            // finally block, which overrode this value — the method always
            // returned 0 regardless of the actual hit count.
            return scoreDocs.length;
        } catch (Exception e) {
            logger.error("Lucene article count failed", e);
            return 0;
        } finally {
            if (indexReader != null) {
                try {
                    indexReader.close();
                } catch (Exception e) {
                    logger.warn("Failed to close IndexReader", e);
                }
            }
        }
    }

    /**
     * Builds and executes the Lucene boolean query for an article search.
     * The free-text keyword is matched against content OR title (SHOULD
     * clauses); searchTags and classify, when present, are mandatory (MUST
     * clauses). At most 10 hits are returned.
     *
     * @param articleDto    search criteria
     * @param indexSearcher an open searcher; the caller owns its lifecycle
     * @return the score docs of the top hits; empty array on any failure
     */
    private ScoreDoc[] createQueryByLucene(ArticleDto articleDto, IndexSearcher indexSearcher) {
        try {
            String keyword = articleDto.getKw();

            BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();

            if (StringUtils.isNotEmpty(keyword)) {
                // Diagnostic tokenization, now guarded by the emptiness check:
                // the original ran it unconditionally and NPE'd on a null kw.
                logKeywordTokens(keyword);

                // Operator joining the split terms: AND vs OR per request flag.
                QueryParser.Operator op = articleDto.getLogicalSplitWord()
                        ? QueryParser.Operator.AND
                        : QueryParser.Operator.OR;

                QueryParser contentParser = new QueryParser("content", LuceneUtils.ANALYZER);
                contentParser.setDefaultOperator(op);
                booleanQueryBuilder.add(contentParser.parse(keyword), BooleanClause.Occur.SHOULD);

                QueryParser titleParser = new QueryParser("title", LuceneUtils.ANALYZER);
                titleParser.setDefaultOperator(op);
                booleanQueryBuilder.add(titleParser.parse(keyword), BooleanClause.Occur.SHOULD);
            }

            if (StringUtils.isNotEmpty(articleDto.getSearchTags())) {
                Query queryTags = new QueryParser("tags", LuceneUtils.ANALYZER)
                        .parse(articleDto.getSearchTags());
                booleanQueryBuilder.add(queryTags, BooleanClause.Occur.MUST);
            }

            if (StringUtils.isNotEmpty(articleDto.getClassify())) {
                // classify is indexed as a StringField (not analyzed), so use
                // an exact TermQuery rather than a parsed query.
                TermQuery queryClassify = new TermQuery(new Term("classify", articleDto.getClassify()));
                booleanQueryBuilder.add(queryClassify, BooleanClause.Occur.MUST);
            }

            Query query = booleanQueryBuilder.build();
            logger.debug("Lucene query: {}", query);

            TopDocs topDocs = indexSearcher.search(query, 10);
            return topDocs.scoreDocs;
        } catch (Exception e) {
            logger.error("Failed to build/execute Lucene query", e);
            return new ScoreDoc[]{};
        }
    }

    /**
     * Logs how the IK analyzer tokenizes the keyword — diagnostic only;
     * failures here never affect the actual search.
     */
    private void logKeywordTokens(String keyword) {
        try {
            IKAnalyzer analyzer = new IKAnalyzer(false);
            TokenStream ts = analyzer.tokenStream("", new StringReader(keyword));
            CharTermAttribute termAtt = ts.getAttribute(CharTermAttribute.class);
            ts.reset();
            StringBuilder tokens = new StringBuilder();
            while (ts.incrementToken()) {
                tokens.append(termAtt.toString()).append(' ');
            }
            ts.close();
            logger.debug("keyword tokens: {}", tokens);
        } catch (Exception e) {
            logger.warn("keyword tokenization debug failed", e);
        }
    }


    /**
     * Soft-deletes an article (is_delete = 1) and removes its document from
     * the full-text index so it no longer appears in Lucene searches.
     *
     * @param id article id
     */
    public void deleteArticle(String id) {
        String sql = "update kc_article set is_delete = 1 where id = ? ";
        jdbcDao.execute(sql, new Object[]{id});

        // Remove the index entry. try-with-resources guarantees the writer is
        // closed; the original's finally block dereferenced a possibly-null
        // writer (NPE silently swallowed) when construction failed.
        IndexWriterConfig config = new IndexWriterConfig(new IKAnalyzer());
        try (IndexWriter indexWriter = new IndexWriter(LuceneUtils.INDEX_DIRECTORY, config)) {
            indexWriter.deleteDocuments(new TermQuery(new Term("id", id)));
        } catch (Exception e) {
            logger.error("Failed to delete Lucene index for article {}", id, e);
        }
    }


    /**
     * Loads all dict entries of a classification as select-box options.
     *
     * @param classify dict classification key
     * @return id/title option pairs; empty when none exist
     */
    public List<VueSelectDto> queryDict(String classify) {
        String sql = " select id , title from kc_dict where classify = ? ";
        return jdbcDao.query(sql, new Object[]{classify})
                .stream()
                .map(row -> new VueSelectDto(row.get("id").toString(), row.get("title").toString()))
                .collect(Collectors.toList());
    }

    /**
     * Searches dict entries for tag autocompletion. Without a keyword the
     * whole classification is returned; otherwise title must contain kw.
     *
     * @param classify dict classification key
     * @param kw       optional substring filter on title
     * @return matching id/title option pairs
     */
    public List<VueSelectDto> queryArticleTags(String classify, String kw) {
        if (StringUtils.isEmpty(kw)) {
            return this.queryDict(classify);
        }
        String sql = " select id , title from kc_dict where classify = ? and title like ? ";
        Object[] params = new Object[]{classify, "%" + kw + "%"};
        return jdbcDao.query(sql, params)
                .stream()
                .map(row -> new VueSelectDto(row.get("id").toString(), row.get("title").toString()))
                .collect(Collectors.toList());
    }

    /**
     * Creates or refreshes the Lucene document for an article.
     *
     * Uses {@link IndexWriter#updateDocument}, which atomically deletes any
     * document whose id term matches and then adds the new one — covering
     * both first-time indexing and re-indexing. This removes the original's
     * pre-search, which opened an IndexReader it never closed (resource leak)
     * merely to decide between add and update.
     *
     * Field choices (analyzed / indexed / stored):
     *   id       StringField — exact term, used as the update key
     *   title    TextField   — analyzed for full-text search
     *   classify StringField — exact-match filter, only when present
     *   tags     TextField   — only when present
     *   content  TextField
     *
     * @param articleDto the article to index; id, title and content are required
     */
    public void indexArticle(ArticleDto articleDto) {
        logger.info("Indexing article: {}", articleDto.getTitle());

        IndexWriterConfig config = new IndexWriterConfig(new IKAnalyzer());
        try (IndexWriter indexWriter = new IndexWriter(LuceneUtils.INDEX_DIRECTORY, config)) {
            Document doc = new Document();

            doc.add(new StringField("id", articleDto.getId(), Field.Store.YES));
            doc.add(new TextField("title", articleDto.getTitle(), Field.Store.YES));

            if (StringUtils.isNotEmpty(articleDto.getClassify())) {
                doc.add(new StringField("classify", articleDto.getClassify(), Field.Store.YES));
            }
            if (StringUtils.isNotEmpty(articleDto.getTags())) {
                doc.add(new TextField("tags", articleDto.getTags(), Field.Store.YES));
            }
            doc.add(new TextField("content", articleDto.getContent(), Field.Store.YES));

            // Replace-or-insert keyed by the article id.
            indexWriter.updateDocument(new Term("id", articleDto.getId()), doc);
        } catch (Exception e) {
            logger.error("Failed to index article {}", articleDto.getId(), e);
        }
    }


    /**
     * Rebuilds the Lucene index entries for all live articles. Soft-deleted
     * rows are skipped — the original indexed them too, silently undoing the
     * index removal done by deleteArticle.
     */
    public void indexAllArticle() {
        String sql = "select id from kc_article where is_delete = 0";
        List<Map<String, Object>> result = jdbcDao.query(sql, new Object[]{});
        for (Map<String, Object> item : result) {
            this.indexArticle(this.queryArticleById(item.get("id").toString()));
        }
    }


    /**
     * Re-indexes a single article by id. A no-op when the id matches no row
     * (the original passed the null through and NPE'd inside indexArticle).
     *
     * @param id article id
     */
    public void indexArticleById(String id) {
        ArticleDto article = this.queryArticleById(id);
        if (article != null) {
            this.indexArticle(article);
        }
    }


    /**
     * Generates a word-cloud PNG from the article's content and writes it to
     * {@code <workdir>/localfile/mdimgs/<articleId>/wc.png}.
     *
     * @param articleDto article whose content feeds the cloud; its id names the folder
     * @throws Exception on tokenization or image-writing failure
     */
    public void createWordCloud(ArticleDto articleDto) throws Exception {
        final FrequencyAnalyzer frequencyAnalyzer = new FrequencyAnalyzer();
        frequencyAnalyzer.setWordFrequenciesToReturn(300);
        frequencyAnalyzer.setMinWordLength(2);
        // Words excluded from the cloud (plain list instead of the original
        // double-brace anonymous-subclass ArrayList anti-pattern).
        frequencyAnalyzer.setStopWords(Arrays.asList("创新", "数据"));
        // Chinese-aware tokenizer so CJK text splits into words, not characters.
        frequencyAnalyzer.setWordTokenizer(new MyTokenizer());

        // Corpus: just this article's content.
        List<String> texts = new ArrayList<String>();
        texts.add(articleDto.getContent());
        final List<WordFrequency> wordFrequencies = frequencyAnalyzer.load(texts);

        final Dimension dimension = new Dimension(500, 312);
        final WordCloud wordCloud = new WordCloud(dimension, CollisionMode.RECTANGLE);
        // A CJK-capable font is required here, otherwise Chinese renders garbled.
        wordCloud.setKumoFont(new KumoFont(new Font("仿宋", Font.BOLD, 25)));
        wordCloud.setPadding(0);
        wordCloud.setColorPalette(new ColorPalette(new Color(0x4055F1), new Color(0x408DF1), new Color(0x40AAF1), new Color(0x40C5F1), new Color(0x40D3F1), new Color(0xFFFFFF)));
        wordCloud.setFontScalar(new LinearFontScalar(10, 40));
        wordCloud.build(wordFrequencies);

        // Output folder: <workdir>/localfile/mdimgs/<articleId>
        // (the original also computed an unused imgPath and jarPath here).
        String jarPathStr = System.getProperty("user.dir");
        logger.info("jar file path : " + jarPathStr);

        String folderFullPath = jarPathStr + File.separator + "localfile" + File.separator
                + "mdimgs" + File.separator + articleDto.getId();
        File imgFolder = new File(folderFullPath);
        if (!imgFolder.exists()) {
            imgFolder.mkdirs();
        }
        wordCloud.writeToFile(folderFullPath + File.separator + "wc.png");
    }


    /**
     * Inserts an attachment row; is_deleted is initialized to 0 (live).
     *
     * @param dto attachment metadata; id is expected to be pre-assigned by the caller
     */
    public void saveAttach(AttachDto dto) {
        String sql = "insert into kc_attach(id,relaId,classify,file_name,file_path,suffix,remark,is_deleted) values (?,?,?,?,?,?,?,?)";
        jdbcDao.execute(sql, new Object[]{
                dto.getId(),
                dto.getRelaId(),
                dto.getClassify(),
                dto.getFileName(),
                dto.getFilePath(),
                dto.getSuffix(),
                dto.getRemark(),
                0 // is_deleted: new attachments are live
        });
    }


    /**
     * Soft-deletes an attachment: the row is kept, only the flag is flipped.
     *
     * @param id attachment id
     */
    public void deleteAttach(String id) {
        jdbcDao.execute("update kc_attach set is_deleted = 1 where id = ? ", new Object[]{id});
    }

    /**
     * Lists the live (is_deleted = 0) attachments bound to a record under a
     * given classification.
     *
     * @param relaId   owning record id (mandatory)
     * @param classify attachment classification (mandatory)
     * @return attachment DTOs; empty list when none match
     */
    public List<AttachDto> queryAttachListByRelaIdAndClassify(String relaId, String classify) {
        AssertUtils.stringIsNotEmpty(relaId, "RelaId can not be empty");
        AssertUtils.stringIsNotEmpty(classify, "Classify can not be empty");

        String sql = " select id,relaId,classify,file_name,file_path,suffix,remark,is_deleted " +
                " from kc_attach where is_deleted = 0 and relaId = ? and classify = ? ";

        List<AttachDto> attaches = new ArrayList<AttachDto>();
        for (Map<String, Object> row : jdbcDao.query(sql, new Object[]{relaId, classify})) {
            AttachDto dto = new AttachDto();
            dto.setId(row.get("id").toString());
            dto.setRelaId(StringUtils.getStr(row.get("relaId")));
            dto.setClassify(StringUtils.getStr(row.get("classify")));
            dto.setFileName(StringUtils.getStr(row.get("file_name")));
            dto.setFilePath(StringUtils.getStr(row.get("file_path")));
            dto.setSuffix(StringUtils.getStr(row.get("suffix")));
            dto.setRemark(StringUtils.getStr(row.get("remark")));
            attaches.add(dto);
        }
        return attaches;
    }


    /**
     * Loads a single live attachment by id.
     *
     * @param attachId attachment id
     * @return the attachment, or null when no live row matches
     */
    public AttachDto queryAttachById(String attachId) {
        String sql = " select id,relaId,classify,file_name,file_path,suffix,remark,is_deleted " +
                " from kc_attach where is_deleted = 0 and id = ?  ";
        List<Map<String, Object>> rows = jdbcDao.query(sql, new Object[]{attachId});
        if (ListUtils.isEmpty(rows)) {
            return null;
        }

        Map<String, Object> row = rows.get(0);
        AttachDto dto = new AttachDto();
        dto.setId(row.get("id").toString());
        dto.setRelaId(StringUtils.getStr(row.get("relaId")));
        dto.setClassify(StringUtils.getStr(row.get("classify")));
        dto.setFileName(StringUtils.getStr(row.get("file_name")));
        dto.setFilePath(StringUtils.getStr(row.get("file_path")));
        dto.setSuffix(StringUtils.getStr(row.get("suffix")));
        dto.setRemark(StringUtils.getStr(row.get("remark")));
        return dto;
    }
}



