package cn.ucox.web.ms.monitor;

import cn.ucox.web.ms.Bootstarpable;
import cn.ucox.web.ms.Constant;
import cn.ucox.web.ms.components.ftp.DexFtpClient;
import cn.ucox.web.ms.config.DataExchangeConfig.ExchangeConfig;
import cn.ucox.web.ms.redis.RedisClient;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.pool2.PooledObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Tuple;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import static cn.ucox.web.ms.Constant.LOG_PARSE;

/**
 * FTP同步文件解析器
 *
 * @author chenw
 * @create 2017-06-29 14:19
 * @email javacspring@gmail.com
 */
public class SyncFileParser implements Bootstarpable {

    private static final Logger logger = LoggerFactory.getLogger(SyncFileParser.class);

    /** Redis sorted set of transactions whose FTP fragment files are fully transferred and ready to parse. */
    public static final String FTP_READY_TRANS_DATA = "FTP:READY:TRANS:DATA";
    /** Redis sorted set indexing the individual synced FTP fragment files. */
    public static final String SYNC_FTP_FILE_INDEX = "SYNC:FTP:FILES";
    /** FTP file parse anchor: Redis hash recording the last handled transaction id, status and start time. */
    public static final String FTP_DATA_PARSE_ANCHOR = "FTP:DATA:PARSE:ANCHOR";
    public static final String LAST_SYNC_TIME = "lst";
    public static final String LAST_SYNC_DIRECTORY_FILES_COUNT = "lsdfc";
    public static final String LAST_SYNC_FILE_NAME = "lsfn";
    public static final String LAST_SYNC_FILE_CREATE_TIME = "lsfct";
    public static final String LAST_SYNC_FILE_CHANGE_FLAG = "lsfcf";
    public static final String LAST_SYNC_TRANS_ID = "lsti";
    public static final String LAST_SYNC_TRANS_DATA_SIZE = "lstds";
    public static final String LAST_SYNC_TRANS_DATA_FRAGMENTS = "lstdf";
    public static final String LAST_SYNC_CURR_DATA_FRAGMENT = "lscdf";

    private final DexFtpClient dexFtpClient;

    private final SyncFileParser.FileParse fileParse;
    private final RedisClient redisClient = RedisClient.instance();
    private ExchangeConfig exchangeConfig;
    // Written by stop() from another thread and read by the parser thread:
    // volatile is required, otherwise the worker loop may never observe the
    // shutdown request.
    private volatile boolean exit = false;

    /**
     * @param dexFtpClient pooled FTP client used to fetch the fragment files
     */
    public SyncFileParser(DexFtpClient dexFtpClient) {
        this.dexFtpClient = dexFtpClient;
        this.fileParse = new SyncFileParser.FileParse();
    }

    /**
     * Starts listening for FTP_READY_TRANS_DATA transactions on a dedicated worker thread.
     */
    @Override
    public void start() {
        // Initialise the FTP parse anchor before the worker begins polling.
        init();
        fileParse.start();
    }

    /**
     * Initialisation before the parser starts: the anchor's transaction id is
     * seeded with -1, meaning "nothing parsed yet" (the worker then adopts the
     * id of the first queued transaction).
     */
    public void init() {
        // NOTE(review): hmsetx presumably means "set only if absent", so a restart
        // keeps the previous anchor — confirm against RedisClient.
        redisClient.hmsetx(FTP_DATA_PARSE_ANCHOR, "trans", "-1",
                "status", "N",
                "startTime", String.valueOf(System.currentTimeMillis()));
    }


    /**
     * Requests the parser thread to stop after its current iteration.
     */
    @Override
    public void stop() {
        exit = true;
        logger.info("同步文件解析组件停止");
    }

    /**
     * Worker thread: polls the ready-transaction queue, downloads every fragment
     * file of the oldest transaction from FTP, backs the fragments up, pushes the
     * parsed records to the result queue and finally advances the parse anchor.
     */
    class FileParse extends Thread {
        @Override
        public void run() {
            while (!exit) {
                long currentTransId = 0;
                Set<Tuple> transData;
                boolean transactionDataHasError = false;
                try {
                    // Read the parse anchor (state left behind by the previous iteration).
                    Map<String, String> anchor = redisClient.hgetAll(FTP_DATA_PARSE_ANCHOR);
                    if (null == anchor || anchor.isEmpty() || null == anchor.get("trans")) {
                        logger.debug("解析锚点数据为空");
                        delayTime();
                        continue;
                    }

                    // Id of the transaction handled last time round; the next expected id
                    // is simply the successor. (The anchor's "status"/"startTime" fields
                    // are reserved for the timeout detection TODO below.)
                    Long preTransId = Long.valueOf(anchor.get("trans"));
                    currentTransId = preTransId + 1;

                    // Take the first entry (lowest transaction id) of the ready queue.
                    Set<Tuple> readyTransDataIndex =
                            redisClient.zrangeWithScore(FTP_READY_TRANS_DATA, 0, 0);
                    if (null == readyTransDataIndex || readyTransDataIndex.isEmpty()) {
                        logger.info("待解析事务数据为空");
                        //TODO: detect timed-out transaction data here
                        delayTime();
                        continue;
                    }

                    Tuple topReadyTransDataIndex = readyTransDataIndex.iterator().next();
                    long topReadyTransDataId = (long) topReadyTransDataIndex.getScore(); // its transaction id
                    // Anchor was seeded with trans = -1, i.e. parsing has just started:
                    // adopt the id of the first queued transaction.
                    if (0 == currentTransId) {
                        currentTransId = topReadyTransDataId;
                    }

                    // A mismatch between the anchor and the queue head means transactions
                    // were lost: raise a warning and resynchronise on the queue head.
                    if (topReadyTransDataId != currentTransId) {
                        parseWarn(topReadyTransDataId,
                                String.format("解析锚点与当前已就绪事务数据存在缺失数据，已解析事务ID:%d,当前事务ID:%d",
                                        preTransId, topReadyTransDataId));
                        logger.warn("解析锚点与当前已就绪事务数据不一致，锚点标记解析事务ID:{},实际解析事务ID:{}",
                                currentTransId, topReadyTransDataId);
                        currentTransId = topReadyTransDataId;
                    }

                    // Queue element format: "<firstFragmentScore> <lastFragmentScore>".
                    String[] val = topReadyTransDataIndex.getElement().split(" ");
                    transData = redisClient.zrangeByScoreWithScores(SYNC_FTP_FILE_INDEX,
                            Double.valueOf(val[0]),
                            Double.valueOf(val[1]));
                    if (null == transData || transData.isEmpty()) {
                        parseWarn(currentTransId, "待解析事务数据片断索引值为空");
                        // NOTE(review): neither the ready queue nor the anchor is advanced
                        // here, so the same transaction is retried forever; sleep at least
                        // avoids a hot spin flooding the warn log.
                        delayTime();
                        continue;
                    }
                    logger.debug("开始解析事务{}数据，起始分片:{},结束分片:{}", currentTransId, val[0], val[1]);
                } catch (Exception ex) {
                    parseError(currentTransId, "计算解析事务数据错误", ex);
                    // Drop the broken transaction from the ready queue (whether or not it
                    // parsed, the entry is removed; failures are only logged) ...
                    redisClient.zremrangeByScore(FTP_READY_TRANS_DATA, currentTransId, currentTransId);
                    // ... and advance the anchor past it, flagged as not successfully parsed.
                    Map<String, String> map = new HashMap<>();
                    map.put("trans", String.valueOf(currentTransId));
                    map.put("startTime", String.valueOf(System.currentTimeMillis()));
                    map.put("status", "N");
                    redisClient.hmset(FTP_DATA_PARSE_ANCHOR, map);
                    continue;
                }

                // Concatenated UTF-8 content of all fragments of the current transaction.
                StringBuilder transStrData = new StringBuilder();

                for (Tuple remoteObj : transData) {
                    String fileName = remoteObj.getElement();
                    PooledObject<FTPClient> pooledObject = null;
                    InputStream input = null;
                    BufferedInputStream bis = null;
                    try {
                        pooledObject = dexFtpClient.get();
                        FTPClient ftpClient = pooledObject.getObject();
                        ftpClient.setFileType(FTPClient.ASCII_FILE_TYPE);
                        ftpClient.enterLocalPassiveMode();
                        input = ftpClient.retrieveFileStream("/" + fileName);
                        if (null == input) {
                            parseWarn(currentTransId, String.format("从FTP加载事务片断%s内容为空", fileName));
                            logger.error("加载FTP事务数据片断{}内容为空", fileName);
                            transactionDataHasError = true;
                            // Back up the empty fragment for post-mortem analysis.
                            backupReceiveErrorData(fileName, "");
                        } else {
                            logger.debug("加载FTP事务数据片断:{}成功", fileName);
                            bis = new BufferedInputStream(input);
                            // Collect the raw bytes first and decode once: decoding chunk by
                            // chunk (as before) could split a multi-byte UTF-8 character
                            // across two reads and used the platform default charset.
                            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
                            byte[] tmp = new byte[1024];
                            int byteRead;
                            while ((byteRead = bis.read(tmp)) > 0) {
                                bytes.write(tmp, 0, byteRead);
                            }
                            String fragment = new String(bytes.toByteArray(), StandardCharsets.UTF_8);
                            // Only buffer the fragment while no earlier fragment failed;
                            // the transaction is treated as all-or-nothing.
                            if (!transactionDataHasError) {
                                transStrData.append(fragment);
                            }
                            // Back up the fragment regardless.
                            backupReceiveData(fileName, fragment);
                            input.close();
                            ftpClient.completePendingCommand();
                        }

                        // The fragment is consumed: remove it from the FTP directory ...
                        // NOTE(review): retrieve uses "/" + fileName but delete uses the
                        // bare name — confirm both resolve to the same remote path.
                        ftpClient.deleteFile(fileName);
                        // ... and from the Redis fragment index.
                        redisClient.zremrangeByScore(SYNC_FTP_FILE_INDEX, remoteObj.getScore(), remoteObj.getScore());
                    } catch (IOException ex) {
                        parseError(currentTransId, "处理事务分片数据错误", fileName, ex);
                        logger.error("处理事务分片数据错误:{}，分片文件名称:{}", ex.getMessage(), fileName, ex);
                        // One broken fragment poisons the whole transaction: it will be
                        // dropped (and logged) rather than pushed downstream.
                        transactionDataHasError = true;
                    } finally {
                        try {
                            if (null != bis) {
                                bis.close(); // also closes the underlying FTP stream
                            } else if (null != input) {
                                input.close();
                            }
                        } catch (IOException e) {
                            logger.error("关闭FTP数据流错误", e);
                        }
                        // Return the client to the pool even when closing the stream failed,
                        // otherwise the pooled connection would leak.
                        if (null != pooledObject) {
                            dexFtpClient.close(pooledObject);
                        }
                    }
                }

                // Every fragment arrived intact: enqueue the transaction record by record.
                if (!transactionDataHasError) {
                    String line = null;
                    int rowIndex = 0;
                    try (BufferedReader reader = new BufferedReader(new StringReader(transStrData.toString()))) {
                        while ((line = reader.readLine()) != null) {
                            // One line == one record.
                            pushDataToQueue(line.getBytes(StandardCharsets.UTF_8));
                            rowIndex++;
                        }
                    } catch (IOException e) {
                        parseError(currentTransId,
                                String.format("添加事务记录数据至队列错误,错误发生在事务数据第%d行", rowIndex),
                                line, e);
                        logger.error("添加事务记录数据:{}至队列错误,错误发生在事务数据第{}行", line, rowIndex, e);
                    }
                }
                transData.clear();
                // Clear the ready entry regardless of success (failures were logged above) ...
                redisClient.zremrangeByScore(FTP_READY_TRANS_DATA, currentTransId, currentTransId);
                // ... and stamp the anchor with the transaction just handled.
                Map<String, String> map = new HashMap<>();
                map.put("trans", String.valueOf(currentTransId));
                map.put("startTime", String.valueOf(System.currentTimeMillis()));
                map.put("status", "Y");
                redisClient.hmset(FTP_DATA_PARSE_ANCHOR, map);
                logger.debug("事务{}解析成功", currentTransId);
            }
        }
    }

    /** Sends a warn-level entry to the parse log queue. */
    private void parseWarn(long transactionId, String msg) {
        parseLog("warn", transactionId, msg, null, null);
    }

    /** Sends a warn-level entry, including the offending data, to the parse log queue. */
    private void parseWarn(long transactionId, String msg, String data) {
        parseLog("warn", transactionId, msg, data, null);
    }

    /** Sends an error-level entry to the parse log queue; {@code ex} may be null. */
    private void parseError(long transactionId, String msg, Exception ex) {
        parseLog("error", transactionId, msg, null, null == ex ? null : ex.getMessage());
    }

    /** Sends an error-level entry, including the offending data, to the parse log queue; {@code ex} may be null. */
    private void parseError(long transactionId, String msg, String data, Exception ex) {
        // Guard against a null exception instead of risking an NPE while reporting an error.
        parseLog("error", transactionId, msg, data, null == ex ? null : ex.getMessage());
    }

    /**
     * Serialises one parse event as JSON and pushes it onto the LOG_PARSE Redis list.
     * Failures are logged locally and swallowed so that logging can never kill the parser.
     */
    private void parseLog(String level, long transactionId, String message, String data, String error) {
        try {
            // Plain instance instead of the former double-brace idiom: the anonymous
            // subclass would pin a reference to the enclosing SyncFileParser.
            JSONObject entry = new JSONObject();
            entry.put("level", level);
            entry.put("trans", transactionId);
            entry.put("msg", message);
            entry.put("data", data);
            entry.put("error", error);
            redisClient.lpush(LOG_PARSE, entry.toJSONString());
        } catch (Exception ex) {
            logger.error("", ex);
        }
    }

    /** Backs up one successfully received fragment under the receive-backup directory. */
    private void backupReceiveData(String fileName, String data) {
        if (writeBackupFile(exchangeConfig.getReceiveBackup(), fileName, data)) {
            logger.debug("备份FTP事务数据片断至receive目录:{}成功", fileName);
        }
    }

    /** Backs up a broken or empty fragment under the receive-error directory. */
    private void backupReceiveErrorData(String fileName, String data) {
        if (writeBackupFile(exchangeConfig.getReceiveError(), fileName, data)) {
            logger.debug("备份FTP事务异常数据片断至receive-error目录:{}成功", fileName);
        }
    }

    /**
     * Writes {@code data} (UTF-8, overwriting any existing file) to {@code dir/fileName},
     * creating the target directory if it does not exist yet.
     *
     * @return true when the write succeeded; failures are logged only, mirroring the
     * best-effort nature of the backups.
     */
    private boolean writeBackupFile(String dir, String fileName, String data) {
        File backDir = new File(dir);
        // Without this the former FileWriter-based code failed outright when the
        // backup directory was missing.
        if (!backDir.exists() && !backDir.mkdirs()) {
            logger.error("创建接收数据备份目录[{}]失败", backDir.getPath());
        }
        try {
            Files.write(Paths.get(dir, fileName), data.getBytes(StandardCharsets.UTF_8));
            return true;
        } catch (IOException e) {
            logger.error("备份数据片断{}至{}失败", fileName, dir, e);
            return false;
        }
    }

    /** Pushes one parsed record onto the result queue; errors are logged and swallowed. */
    private void pushDataToQueue(byte[] data) {
        try {
            redisClient.lpush(Constant.SYNC_TRANS_RESULT_DATA.getBytes(StandardCharsets.UTF_8), data);
        } catch (Exception ex) {
            logger.error("存储事务数据至队列错误", ex);
        }
    }

    /** Sleeps one second between polls; restores the interrupt flag when interrupted. */
    private void delayTime() {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException ex) {
            // Re-interrupt so the owning thread can still observe the interruption.
            Thread.currentThread().interrupt();
            logger.error("延迟错误", ex);
        }
    }

    public void setExchangeConfig(ExchangeConfig exchangeConfig) {
        this.exchangeConfig = exchangeConfig;
    }
}
