package com.nlp.visualization.controller.spring.controller;

import com.nlp.visualization.core.discourse.IBayesService;
import com.nlp.visualization.core.seg.SegmentType;
import com.nlp.visualization.core.seg.filter.SegFilter;
import com.nlp.visualization.pojo.NLP.seg.SegmentEntity;
import com.nlp.visualization.pojo.NLP.sen.SentenceEntity;
import com.nlp.visualization.service.IDataSegmentService;
import com.nlp.visualization.service.IDataSentenceService;
import com.nlp.visualization.service.IDataService;
import com.nlp.visualization.service.IFileService;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.util.*;

import static com.nlp.visualization.common.CONSTANTS.ERROR_UPLOAD;
import static com.nlp.visualization.core.sentence.SentenceType.HANLP_MaxEnt;


@Controller("springFileController")
@RequestMapping("/file/upload")
public class FileController {

    @Autowired
    private IDataService dataService;

    @Autowired
    private IDataSegmentService dataSegmentService;

    @Autowired
    private IFileService fileService;

    @Autowired
    private IDataSentenceService dataSentenceService;

    // Injected but currently unused at runtime: the Bayes training call in
    // uploadBayesFile is commented out below. Kept for when training is re-enabled.
    @Autowired
    private IBayesService bayesService;

    private final Logger logger = LoggerFactory.getLogger(FileController.class);

    /**
     * Uploads text files and runs word segmentation (HanLP CRF) on each one.
     * <p>
     * For every uploaded file the response data contains its name, the in-memory
     * segmentation result, the raw text, and a server-relative download path of
     * the segmented output file.
     *
     * @param request  the multipart upload request (field name "needToSeg")
     * @param response servlet response, passed through to the file service
     * @return JSON map: {@code status} (0 on success), {@code msg}, {@code data}
     */
    @RequestMapping("seg")
    @ResponseBody
    public Map<String, Object> uploadSegFile(HttpServletRequest request, HttpServletResponse response) {

        Map<String, Object> result = new HashMap<>();
        List<Map<String, Object>> resultSrc = new ArrayList<>();
        // Filter used during segmentation: keep punctuation and stopwords disabled off.
        SegFilter segFilter = new SegFilter.FilterBuilder()
                .enablePunctuation(false)
                .enableStopwords(false)
                .build();
        // Webapp root; output files are written below it.
        // NOTE(review): getRealPath("") can return null for unexploded WARs — confirm deployment.
        String savePath = request.getSession().getServletContext().getRealPath("");

        List<File> needToSeg;
        try {
            needToSeg = fileService.upload(request, response, "needToSeg");
        } catch (IOException e) {
            return ioError(e, result);
        }

        for (File tmpFile : needToSeg) {
            try {
                Map<String, Object> dataMap = new HashMap<>();
                // NOTE(review): deprecated platform-default-charset overload; confirm
                // uploads match the platform encoding or switch to an explicit "UTF-8".
                String fileString = FileUtils.readFileToString(tmpFile);
                // Split into sentences, then segment each one.
                List<String> eachLine = dataService.splitSentences(fileString);
                SegmentEntity segmentEntity = dataSegmentService.seg(eachLine, SegmentType.HANLP_CRF, segFilter);
                // Segment the whole file again to disk so the user can download it.
                File afterSegFile = dataSegmentService.segFile(tmpFile, savePath, SegmentType.HANLP_CRF, segFilter);
                dataMap.put("fileName", tmpFile.getName());
                dataMap.put("segResult", segmentEntity);
                dataMap.put("txt", fileString);
                dataMap.put("download", relativeDownloadPath(afterSegFile, savePath));
                resultSrc.add(dataMap);
            } catch (IOException e) {
                return ioError(e, result);
            }
        }

        return success(result, resultSrc);
    }

    /**
     * Uploads text files and runs dependency/syntax parsing (HanLP MaxEnt) on each one.
     * <p>
     * Returns, per file: its name, the parse result for display, the raw text,
     * and a server-relative download path of the parsed output file.
     *
     * @param request  the multipart upload request (field name "needToSen")
     * @param response servlet response, passed through to the file service
     * @return JSON map: {@code status} (0 on success), {@code msg}, {@code data}
     */
    @RequestMapping("sen")
    @ResponseBody
    public Map<String, Object> uploadSenFile(HttpServletRequest request, HttpServletResponse response) {
        Map<String, Object> result = new HashMap<>();
        List<Map<String, Object>> resultSrc = new ArrayList<>();
        String savePath = request.getSession().getServletContext().getRealPath("");

        List<File> needToSen;
        try {
            needToSen = fileService.upload(request, response, "needToSen");
        } catch (IOException e) {
            return ioError(e, result);
        }

        for (File tmpFile : needToSen) {
            try {
                Map<String, Object> dataMap = new HashMap<>();
                String fileString = FileUtils.readFileToString(tmpFile);
                // Parse once in memory for the frontend display...
                SentenceEntity sentenceEntity = dataSentenceService.parseSingle(fileString, HANLP_MaxEnt);
                // ...and once to a file under savePath for the user to download.
                File afterSenFile = dataSentenceService.parseFile(tmpFile, savePath, HANLP_MaxEnt);
                dataMap.put("fileName", tmpFile.getName());
                dataMap.put("senResult", sentenceEntity);
                dataMap.put("txt", fileString);
                dataMap.put("download", relativeDownloadPath(afterSenFile, savePath));
                resultSrc.add(dataMap);
            } catch (IOException e) {
                return ioError(e, result);
            }
        }

        return success(result, resultSrc);
    }

    /**
     * Uploads word-cloud background images and returns their download paths.
     *
     * @param request  the multipart upload request (field name "wordcloudImages")
     * @param response servlet response, passed through to the file service
     * @return JSON map: {@code status} (0 on success), {@code msg}, {@code data}
     *         where data lists {@code fileName} and {@code download} per image
     */
    @RequestMapping("wordcloudBackground")
    @ResponseBody
    public Map<String, Object> uploadNormolImage(HttpServletRequest request, HttpServletResponse response) {
        Map<String, Object> result = new HashMap<>();
        List<Map<String, Object>> resultSrc = new ArrayList<>();
        String savePath = request.getSession().getServletContext().getRealPath("");

        try {
            for (File f : fileService.upload(request, response, "wordcloudImages")) {
                Map<String, Object> dataMap = new HashMap<>();
                dataMap.put("fileName", f.getName());
                dataMap.put("download", relativeDownloadPath(f, savePath));
                resultSrc.add(dataMap);
            }
        } catch (IOException e) {
            return ioError(e, result);
        }

        return success(result, resultSrc);
    }

    /**
     * Uploads Bayes training corpus files. The category label is taken from the
     * part of the filename after the first underscore ("prefix_category...").
     * Training itself is currently disabled (commented out below); the corpus is
     * still collected into {@code trainMap} so it can be re-enabled easily.
     *
     * @param request  the multipart upload request (field name "bayes")
     * @param response servlet response, passed through to the file service
     * @return JSON map: {@code status} (0 on success), {@code msg}, {@code data}
     */
    @RequestMapping("/bayes")
    @ResponseBody
    public Map<String, Object> uploadBayesFile(HttpServletRequest request, HttpServletResponse response) {
        Map<String, Object> result = new HashMap<>();
        List<Map<String, Object>> resultSrc = new ArrayList<>();
        // category label -> lines of training corpus
        Map<String, String[]> trainMap = new HashMap<>();

        List<File> needToTrainByBayes;
        try {
            needToTrainByBayes = fileService.upload(request, response, "bayes");
        } catch (IOException e) {
            return ioError(e, result);
        }

        for (File tmpFile : needToTrainByBayes) {
            try {
                Map<String, Object> dataMap = new HashMap<>();
                // Read the line list once (the original re-read the same file three times).
                List<String> lines = FileUtils.readLines(tmpFile);
                String[] corpus = lines.toArray(new String[0]);
                String name = tmpFile.getName();
                String[] parts = name.split("_");
                // Fall back to the raw filename instead of throwing
                // ArrayIndexOutOfBoundsException when no underscore is present.
                String category = parts.length > 1 ? parts[1] : name;
                trainMap.put(category, corpus);
                dataMap.put("fileName", category);
                dataMap.put("content", FileUtils.readFileToString(tmpFile));
                resultSrc.add(dataMap);
            } catch (IOException e) {
                return ioError(e, result);
            }
        }

//        //开始训练
//        String tag = bayesService.trainClassifier("BayesTraining", trainMap);
//        result.put("tag", tag);
        return success(result, resultSrc);
    }

    /**
     * Converts an output file's absolute path into the path relative to the
     * webapp root, suitable for a browser download link.
     * <p>
     * The original code used {@code path.split(savePath)[1]}, which treats the
     * save path as a regular expression and throws PatternSyntaxException on
     * Windows paths (backslashes) or any path containing regex metacharacters.
     * A plain prefix strip is both correct and cheaper.
     *
     * @param file     output file, expected to live under {@code savePath}
     * @param savePath webapp root path prefix
     * @return the portion of the file's path after {@code savePath}
     */
    private static String relativeDownloadPath(File file, String savePath) {
        return file.getPath().substring(savePath.length());
    }

    /**
     * Fills the standard success envelope ({@code status}=0, {@code msg}, {@code data}).
     *
     * @param result the response map to populate
     * @param data   the per-file result list
     * @return the populated {@code result} map
     */
    private static Map<String, Object> success(Map<String, Object> result, List<Map<String, Object>> data) {
        result.put("status", 0);
        result.put("msg", "上传成功");
        result.put("data", data);
        return result;
    }

    /**
     * Fills the standard error envelope for an upload I/O failure and logs it.
     * Replaces the former misspelled {@code IOExcepction} helper; the exception
     * is now passed to the logger so the stack trace lands in the log instead of
     * being dumped to stderr via {@code printStackTrace()}.
     *
     * @param e      the I/O failure
     * @param result the response map to populate
     * @return the populated {@code result} map with {@code status}=ERROR_UPLOAD
     */
    private Map<String, Object> ioError(IOException e, Map<String, Object> result) {
        result.put("status", ERROR_UPLOAD);
        result.put("msg", "上传异常");
        logger.error("上传异常", e);
        return result;
    }


}
