package com.nlp.visualization.service.weibo;

import com.alibaba.fastjson.JSONObject;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.nlp.visualization.common.CONSTANTS;
import com.nlp.visualization.core.format.CSVUtil;
import com.nlp.visualization.core.format.JsonUtil;
import com.nlp.visualization.core.seg.SegmentType;
import com.nlp.visualization.core.seg.filter.SegFilter;
import com.nlp.visualization.pojo.NLP.weibo.WeiboCSVEntity;
import com.nlp.visualization.pojo.NLP.weibo.WeiboSegEntity;
import com.nlp.visualization.service.tasks.WeiboSeg2SQLTask;
import com.nlp.visualization.service.tasks.WeiboSeg2TxtTask;
import com.nlp.visualization.service.tasks.WeiboSegTask;
import com.nlp.visualization.utils.MyFIleUtil;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

/**
 * Service that segments weibo (microblog) posts and persists the results in
 * several output formats (JSON, plain text for LDA training, SQL).
 *
 * <p>Each public entry point reads the weibo dump, fans the segmentation work
 * out to a {@link ForkJoinPool} task, collects the per-record results in a
 * {@link ConcurrentLinkedQueue}, and then writes the queue to the target file.
 * All file I/O assumes UTF-8 encoded input/output — TODO confirm the dump's
 * encoding if it was produced on a non-UTF-8 platform.</p>
 */
@Service
public class WeiboSegServiceImpl implements IWeiboSegService {

    private static final Logger logger = LoggerFactory.getLogger(WeiboSegServiceImpl.class);

    /**
     * Converts the raw weibo CSV file to JSON so that all downstream
     * processing works on a single (JSON) representation.
     *
     * @param csvFilePath  path of the source CSV file
     * @param jsonFilePath path the converted JSON file is written to
     * @param header       CSV column headers used to map columns onto {@link WeiboCSVEntity}
     */
    public void csv2Json(String csvFilePath, String jsonFilePath, String[] header) {
        logger.info("执行csv转json");
        long start = System.currentTimeMillis();
        // Delegate the actual conversion to the shared CSV utility.
        CSVUtil.csv2json(csvFilePath, jsonFilePath, header, WeiboCSVEntity.class);
        logger.info("完成csv转json,总共耗时：" + (System.currentTimeMillis() - start) / 1000 + "s");
    }

    /**
     * Pre-processes the raw weibo JSON file before segmentation.
     *
     * @param inputPath  path of the raw JSON file
     * @param outputPath path the cleaned JSON file is written to
     */
    @Override
    public void jsonPreProcess(String inputPath, String outputPath) {
        logger.info("执行微博原始的json数据预处理");
        long start = System.currentTimeMillis();
        // The actual normalisation rules live in JsonUtil.
        JsonUtil.weiboJsonDataPreProcess(inputPath, outputPath);
        logger.info("完成微博原始的json数据预处理,总共耗时：" + (System.currentTimeMillis() - start) + "ms");
    }

    /**
     * Segments the weibo JSON file and stores the result as plain text
     * (one record per line, suitable as an LDA training sample).
     *
     * @param in          weibo JSON input file
     * @param out         plain-text output file
     * @param segmentType segmentation backend to use
     * @param filter      filter applied to the segmented tokens
     */
    @Override
    public void seg2PlainText(File in, File out, SegmentType segmentType, SegFilter filter) {
        seg2TxtCommon(in, out, segmentType, filter);
    }

    /**
     * Same as {@link #seg2PlainText(File, File, SegmentType, SegFilter)} but
     * with the default filter.
     */
    @Override
    public void seg2PlainText(File in, File out, SegmentType segmentType) {
        seg2TxtCommon(in, out, segmentType, defaultFilter());
    }

    /**
     * Segments the weibo JSON file and stores the result as a JSON array,
     * using the default filter.
     *
     * @param weiboJsonFilePath path of the weibo JSON input file
     * @param outputFilePath    path the segmented JSON array is written to
     * @param segmentType       segmentation backend to use
     */
    public void seg2Json(String weiboJsonFilePath, String outputFilePath, SegmentType segmentType) {
        seg2JsonCommon(weiboJsonFilePath, outputFilePath, segmentType, defaultFilter());
    }

    /**
     * Segments the weibo JSON file and stores the result as a JSON array.
     */
    @Override
    public void seg2Json(String weiboJsonFilePath, String outputFilePath, SegmentType segmentType, SegFilter filter) {
        seg2JsonCommon(weiboJsonFilePath, outputFilePath, segmentType, filter);
    }

    /**
     * Segments the weibo JSON file and stores the result as XML.
     *
     * @param weiboJsonPath path of the weibo JSON input file
     */
    public void seg2XMLText(String weiboJsonPath) {
        // TODO: not implemented yet — intentionally a no-op.
    }

    /**
     * Segments the weibo JSON file and stores the result as SQL statements.
     */
    @Override
    public void seg2SQL(String weiboJsonFilePath, String outputFilePath, SegmentType segmentType, SegFilter filter) {
        seg2SQLCommon(weiboJsonFilePath, outputFilePath, segmentType, filter);
    }

    /**
     * Same as {@link #seg2SQL(String, String, SegmentType, SegFilter)} but
     * with the default filter.
     */
    @Override
    public void seg2SQL(String weiboJsonFilePath, String outputFilePath, SegmentType segmentType) {
        seg2SQLCommon(weiboJsonFilePath, outputFilePath, segmentType, defaultFilter());
    }

    /** Builds the default segmentation filter used by the convenience overloads. */
    private SegFilter defaultFilter() {
        return new SegFilter.FilterBuilder().build();
    }

    /**
     * Common implementation for the JSON output format: parses the input file,
     * runs the segmentation tasks, and writes the queue as a JSON array.
     *
     * @param weiboJsonFilePath path of the weibo JSON input file
     * @param outputFilePath    path the segmented JSON array is written to
     * @param segmentType       segmentation backend to use
     * @param filter            filter applied to the segmented tokens
     */
    private void seg2JsonCommon(String weiboJsonFilePath, String outputFilePath, SegmentType segmentType, SegFilter filter) {
        long startTime = System.currentTimeMillis();
        logger.info("执行JSON格式的微博数据分词");
        // Thread-safe sink: the fork/join sub-tasks append results concurrently.
        ConcurrentLinkedQueue<WeiboSegEntity> queue = new ConcurrentLinkedQueue<>();
        File outputFile = new File(outputFilePath);

        try {
            // Start a fresh JSON array. append=false truncates stale output from a
            // previous run; appending "[" to an existing file would yield invalid JSON.
            FileUtils.write(outputFile, "[", StandardCharsets.UTF_8, false);

            // Read the whole input file into memory and parse it as an array of entities.
            File inputFile = new File(weiboJsonFilePath);
            List<WeiboCSVEntity> lines = JSONObject.parseArray(
                    FileUtils.readFileToString(inputFile, StandardCharsets.UTF_8), WeiboCSVEntity.class);

            ForkJoinPool forkJoinPool = new ForkJoinPool();
            try {
                forkJoinPool.submit(new WeiboSegTask(0, lines.size(), lines, queue, segmentType, filter));
                // shutdown() before awaitTermination(): the pool accepts no new work,
                // so awaitTermination returns as soon as the submitted task tree finishes.
                forkJoinPool.shutdown();
                if (!forkJoinPool.awaitTermination(CONSTANTS.TASK_TIMEOUT, TimeUnit.SECONDS)) {
                    logger.warn("执行多线程分词的时候异常");
                }
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can observe the interruption.
                Thread.currentThread().interrupt();
                logger.error("执行多线程分词的时候异常", e);
            }

            writeFileByUtils(queue, outputFile);
            // Close the JSON array.
            FileUtils.write(outputFile, "]", StandardCharsets.UTF_8, true);
        } catch (IOException e) {
            logger.error("seg2Json出错", e);
        } catch (Exception e) {
            logger.error("未知错误", e);
        }

        logger.info("完成JSON格式微博数据分词，共耗时" + (System.currentTimeMillis() - startTime) + "ms");
    }

    /**
     * Common implementation for the SQL output format: parses the input file,
     * runs the segmentation tasks, and appends the generated SQL statements
     * to the output file.
     *
     * @param weiboJsonFilePath path of the weibo JSON input file
     * @param outputFilePath    path the SQL statements are appended to
     * @param segmentType       segmentation backend to use
     * @param filter            filter applied to the segmented tokens
     */
    private void seg2SQLCommon(String weiboJsonFilePath, String outputFilePath, SegmentType segmentType, SegFilter filter) {
        long startTime = System.currentTimeMillis();
        logger.info("执行JSON格式的微博数据分词,目标SQL格式");
        // Thread-safe sink: the fork/join sub-tasks append SQL strings concurrently.
        ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<>();
        File outputFile = new File(outputFilePath);

        try {
            File inputFile = new File(weiboJsonFilePath);
            List<WeiboCSVEntity> lines = JSONObject.parseArray(
                    FileUtils.readFileToString(inputFile, StandardCharsets.UTF_8), WeiboCSVEntity.class);

            ForkJoinPool forkJoinPool = new ForkJoinPool();
            try {
                forkJoinPool.submit(new WeiboSeg2SQLTask(0, lines.size(), lines, queue, segmentType, filter));
                // shutdown() then awaitTermination(): block until the task tree completes.
                forkJoinPool.shutdown();
                if (!forkJoinPool.awaitTermination(CONSTANTS.TASK_TIMEOUT, TimeUnit.SECONDS)) {
                    logger.warn("执行多线程分词的时候异常");
                }
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can observe the interruption.
                Thread.currentThread().interrupt();
                logger.error("执行多线程分词的时候异常", e);
            }

            writeFileByUtils_SQL(queue, outputFile);
        } catch (IOException e) {
            logger.error("seg2Json出错", e);
        } catch (Exception e) {
            logger.error("未知错误", e);
        }

        logger.info("完成JSON格式微博数据分词-目标SQL格式，共耗时" + (System.currentTimeMillis() - startTime) + "ms");
    }

    /**
     * Common implementation for the plain-text output format (used for LDA
     * training samples): parses the input file, runs the segmentation tasks,
     * and writes the queue to the output file.
     *
     * @param in          weibo JSON input file
     * @param out         plain-text output file
     * @param segmentType segmentation backend to use
     * @param filter      filter applied to the segmented tokens
     */
    private void seg2TxtCommon(File in, File out, SegmentType segmentType, SegFilter filter) {
        long startTime = System.currentTimeMillis();
        logger.info("执行JSON格式的微博数据分词,目标TXT格式");
        // Thread-safe sink: the fork/join sub-tasks append result lines concurrently.
        ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<>();
        try {
            // parseBigJson is the project's large-file parser — presumably lighter on
            // memory than readFileToString; see MyFIleUtil for its exact behaviour.
            List<WeiboCSVEntity> lines = MyFIleUtil.parseBigJson(in);

            ForkJoinPool forkJoinPool = new ForkJoinPool();
            try {
                forkJoinPool.submit(new WeiboSeg2TxtTask(0, lines.size(), lines, queue, segmentType, filter));
                // shutdown() then awaitTermination(): block until the task tree completes.
                forkJoinPool.shutdown();
                if (!forkJoinPool.awaitTermination(CONSTANTS.TASK_TIMEOUT, TimeUnit.SECONDS)) {
                    logger.warn("执行多线程分词的时候异常");
                }
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can observe the interruption.
                Thread.currentThread().interrupt();
                logger.error("执行多线程分词的时候异常", e);
            }

            MyFIleUtil.writeQueue2File(queue, out);
        } catch (IOException e) {
            logger.error("seg2TXT出错", e);
        } catch (Exception e) {
            logger.error("未知错误", e);
        }

        logger.info("完成JSON格式微博数据分词-目标TXT格式，共耗时" + (System.currentTimeMillis() - startTime) + "ms");
    }

    /**
     * NIO-based variant of {@link #writeFileByUtils}: appends each entity's
     * pretty-printed JSON (comma separated, no trailing comma) to the end of
     * {@code outputFile} through a {@link FileChannel}.
     *
     * <p>Currently unused by the public entry points; kept as an alternative
     * writer implementation.</p>
     *
     * @param queue      segmented entities to serialise
     * @param outputFile file the JSON fragments are appended to
     * @throws IOException if the channel cannot be opened
     */
    private void writeQueue(ConcurrentLinkedQueue<WeiboSegEntity> queue, File outputFile) throws IOException {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        int written = 0;
        int last = queue.size() - 1;
        // try-with-resources closes the channel even when a write fails
        // (previously it was only closed on the success path).
        try (RandomAccessFile raf = new RandomAccessFile(outputFile, "rws");
             FileChannel fcout = raf.getChannel()) {
            for (WeiboSegEntity seg : queue) {
                String json = gson.toJson(seg);
                // Comma after every element except the last, so the surrounding
                // "[" / "]" written by the caller forms a valid JSON array.
                writeFileByLine(fcout, written != last ? json + "," : json);
                written++;
            }
            logger.info("本次写入了" + written + "条分词数据");
        }
    }

    /**
     * Appends one UTF-8 encoded string at the current end of the channel.
     *
     * @param fcout open channel positioned over the output file
     * @param line  text to append
     */
    private void writeFileByLine(FileChannel fcout, String line) {
        try {
            fcout.write(ByteBuffer.wrap(line.getBytes(StandardCharsets.UTF_8)), fcout.size());
        } catch (IOException e) {
            logger.error("nio写入文件错误", e);
        }
    }

    /**
     * Serialises each queued entity to pretty-printed JSON and appends the
     * result (comma separated, no trailing comma) to {@code outputFile}.
     *
     * @param queue      segmented entities to serialise
     * @param outputFile file the JSON fragments are appended to
     * @throws IOException if the append fails
     */
    private void writeFileByUtils(ConcurrentLinkedQueue<WeiboSegEntity> queue, File outputFile) throws IOException {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        StringBuilder sb = new StringBuilder();
        int i = 0;
        int last = queue.size() - 1;
        for (WeiboSegEntity seg : queue) {
            sb.append(gson.toJson(seg));
            if (i != last) {
                sb.append(',');
            }
            i++;
        }
        // One append for the whole batch instead of re-opening the file per entity.
        FileUtils.write(outputFile, sb.toString(), StandardCharsets.UTF_8, true);
    }

    /**
     * Appends every queued SQL statement to {@code outputFile}.
     *
     * @param queue      SQL statements produced by the segmentation tasks
     * @param outputFile file the statements are appended to
     * @throws IOException if the append fails
     */
    private void writeFileByUtils_SQL(ConcurrentLinkedQueue<String> queue, File outputFile) throws IOException {
        StringBuilder sb = new StringBuilder();
        for (String seg : queue) {
            sb.append(seg);
        }
        // Keep the original append semantics: repeated runs accumulate statements.
        FileUtils.write(outputFile, sb.toString(), StandardCharsets.UTF_8, true);
    }

}
