package cn.com.bonc.khjy.schedu;

import cn.com.bonc.khjy.bean.FtpRuleBean;
import cn.com.bonc.khjy.utils.*;
import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.commons.io.IOUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import javax.annotation.PostConstruct;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

/**
 * Created by hanbing on 2019/5/31.
 */
@Component
public class FileFTPMonitor {
    private final static Log logger = LogFactory.get();
    // Keys of the per-contact-point configuration entries read from ContactSource.json in init().
    private final static String F1 = "temp.file.store.path";
    private final static String F2 = "file.store.path";
    private final static String F3=  "max.file.line";
    private final static String F4 = "ftp.remote.dir";
    private final static String F5 = "ftp.server.ip";
    private final static String F6 = "ftp.server.port";
    private final static String F7 = "ftp.username";
    private final static String F8 = "ftp.password";
    private final static String F9 = "ftp.local.dir";
    private final static String F10 = "ftp.prefix.filename1";
    private final static String F11 = "ftp.prefix.filename2";
    // Placeholder written into the value line when activityRemarks is missing.
    private final static String DEFAULTVALUE = "RGNULL";
    // Fallback line threshold used by getF4Sum() when redis holds no counters.
    @Value( "${max.file.line}" )
    private int maxLine;
    // Comma-separated column names excluded from the generated header/value lines.
    @Value("${workorders.xcloud.cols}")
    private String cols;
    // Push rules keyed by contact-point id (populated in init()).
    private Map<String,FtpRuleBean> ruleList;
    // Work-order message queues, one per contact-point id (populated in init()).
    private Map<String,LinkedBlockingQueue< String >> workOrderInfo;
    // Stop flags observed by the worker loops started in toThreadStart().
    private volatile boolean tempFileStop = false;
    private volatile boolean ftpFileStop = false;
    // Temp-file paths keyed by "contactId|activityId"; "" marks a reset slot.
    private Map< String, String > tempFileMap = new ConcurrentHashMap<>();

    // Date format (yyyyMMdd) for redis key suffixes.
    // NOTE(review): SimpleDateFormat is not thread-safe, and df is used from the
    // async worker threads spawned in toThreadStart() — confirm and consider
    // DateTimeFormatter or a per-call instance.
    private SimpleDateFormat df = new SimpleDateFormat("yyyyMMdd");


    // Queues of finished files awaiting FTP upload, one per contact-point id.
    private volatile  Map<String, LinkedBlockingQueue< Map >>  ftpFileQueue ;


    public Map<String, LinkedBlockingQueue<Map>> getFtpFileQueue() {
        return ftpFileQueue;
    }

    public void setFtpFileQueue(Map<String, LinkedBlockingQueue<Map>> ftpFileQueue) {
        this.ftpFileQueue = ftpFileQueue;
    }

    public Map<String,FtpRuleBean> getRuleList() {
        return ruleList;
    }

    public void setRuleList(Map<String,FtpRuleBean> ruleList) {
        this.ruleList = ruleList;
    }

    public Map<String, LinkedBlockingQueue< String >> getWorkOrderInfo() {
        return workOrderInfo;
    }

    public void setWorkOrderInfo(Map<String, LinkedBlockingQueue< String >> workOrderInfo) {
        this.workOrderInfo = workOrderInfo;
    }
    // Timestamp of when the current temp file was started (set in consumptionKafkaInfo).
    private Date startTime;
    public void setStartTime(Date startTime) {
        this.startTime = startTime;
    }
    /**
     * Loads the per-contact-point FTP rules from classpath:ContactSource.json
     * and initialises one work-order queue and one file queue per contact id.
     * Invoked once by the container after construction; failures are logged
     * and leave the maps unset.
     */
    @PostConstruct
    public void init() {
        logger.debug("FTP传输触点规则开始加载：{}", "ContactSource.json");
        try {
            Map<String, FtpRuleBean> rules = new HashMap<>();
            Map<String, LinkedBlockingQueue<String>> orderQueues = new HashMap<>();
            Map<String, LinkedBlockingQueue<Map>> fileQueues = new HashMap<>();
            InputStream in = new DefaultResourceLoader().getResource("classpath:ContactSource.json").getInputStream();
            String jsonData = IOUtils.toString(in, "UTF-8");
            Map<String, ?> root = JacksonMapper.INSTANCE.readJsonToObject(jsonData);
            // The "list" node holds one object per contact point: { id: { config... } }.
            String ruleStr = JacksonMapper.INSTANCE.writeObjectToJson(root.get("list"));
            logger.debug("FTP传输触点规则内容：{}", ruleStr);
            List<Map> parsedRules = JacksonMapper.INSTANCE.readJsonToList(ruleStr);
            for (Map ruleEntry : parsedRules) {
                for (Object o : ruleEntry.entrySet()) {
                    Map.Entry<String, Map> entry = (Map.Entry<String, Map>) o;
                    String id = entry.getKey();
                    Map cfg = entry.getValue();
                    rules.put(id, new FtpRuleBean(String.valueOf(cfg.get(F1)), String.valueOf(cfg.get(F2)), String.valueOf(cfg.get(F3)), String.valueOf(cfg.get(F4)), String.valueOf(cfg.get(F5)), String.valueOf(cfg.get(F6)), String.valueOf(cfg.get(F7)), String.valueOf(cfg.get(F8)), String.valueOf(cfg.get(F9)), String.valueOf(cfg.get(F10)), String.valueOf(cfg.get(F11))));
                    orderQueues.put(id, new LinkedBlockingQueue<>( 1000000 ));
                    fileQueues.put(id, new LinkedBlockingQueue<>( 500 ));
                }
            }
            this.setRuleList(rules);
            this.setWorkOrderInfo(orderQueues);
            this.setFtpFileQueue(fileQueues);
            logger.debug("传输加载规则完成-队列初始化完成：{}", orderQueues.size());
        } catch (Exception ex) {
            logger.error("初始化传输规则失败", ex);
        }
    }


    /**
     * Extracts the variable names wrapped in the outermost {...} pairs of a
     * template string, in order of appearance. Example: "a{x}b{y}" yields
     * ["x", "y"]; nested braces stay inside the extracted text, so "{a{b}}"
     * yields ["a{b}"].
     *
     * @param msg template text to scan; may be null
     * @return variable names in order of appearance; empty list when msg is
     *         null or contains no brace pairs
     */
    static List<String> extractVariable(String msg) {
        List<String> list = new ArrayList<String>();
        if (msg == null) {
            // Robustness fix: the previous version threw NullPointerException here.
            return list;
        }
        int start = 0;
        int startFlag = 0;
        int endFlag = 0;
        for (int i = 0; i < msg.length(); i++) {
            char c = msg.charAt(i);
            if (c == '{') {
                startFlag++;
                if (startFlag == endFlag + 1) {
                    start = i; // position of the outermost opening brace
                }
            } else if (c == '}') {
                endFlag++;
                if (endFlag == startFlag) {
                    // Braces balanced again: everything between is one variable.
                    list.add(msg.substring(start + 1, i));
                }
            }
        }
        return list;
    }
    /**
     * Converts one work-order JSON message into a pipe-delimited value line
     * plus its matching header line.
     *
     * <p>Line layout: {@code channelCode|activityRemarks|field...}. When the
     * message carries a templateContent, the fields are the template's {var}
     * variables in template order; otherwise every key of the "data" node is
     * emitted. Keys listed in the configured {@code cols} property are skipped.
     *
     * @param jsonData raw work-order JSON message
     * @return array of two strings: [0] the value line, [1] the header line
     */
    private   String[] parseJsonToFTPString(String jsonData){
        // Parse the envelope and pull out the optional message template.
        Map<String, ?> map = JacksonMapper.INSTANCE.readJsonToObject(jsonData);
        String templateContent=PrintUtil.isNotBlank( String.valueOf(map.get("templateContent")))?String.valueOf(map.get("templateContent")):"";
        logger.debug("模板信息:{}",templateContent);
        // NOTE(review): String.valueOf on the "data" node is assumed to yield text
        // that JacksonMapper can re-parse — verify against JacksonMapper's contract.
        String str = String.valueOf(map.get("data"));
        Map< String, ? > data = JacksonMapper.INSTANCE.readJsonToObject(str);
        StringBuilder tagValue = new StringBuilder();
        StringBuilder header = new StringBuilder();
        List<String> headerList=new ArrayList<>();
        header.append("channelCode");//contact-point id from the message stream
        header.append("|").append("activityRemarks");//external activity id from the message stream

        // Column names configured in workorders.xcloud.cols are excluded below.
        String[] strTemp=this.cols.split(",");
        for(int i=0;i<strTemp.length;i++){
            headerList.add(strTemp[i]);
        }
        String activityRemarks =  String.valueOf(map.get( "activityRemarks" ));
        if (!StringUtils.isEmpty(map.get( "channelCode" ))){
            String id =  map.get( "channelCode" ).toString();
            tagValue.append(id);
        }else {
            // Missing channelCode: the value line starts with an empty first field.
            logger.debug("触点channelCode 关键参数为空,无法生成文件");
        }
        if(PrintUtil.isNotBlank(activityRemarks)){
            tagValue.append("|").append(activityRemarks);
        }else{
            // Missing activity id: substitute the RGNULL placeholder.
            logger.debug("关联活动IDactivityRemarks 关键参数为空");
            tagValue.append("|").append(DEFAULTVALUE);
        }
        if(PrintUtil.isNotBlank(templateContent)){
            // Template present: emit only the template's {var} fields, in template order.
            List<String> orderBls=extractVariable(templateContent);
            for(int i=0;i<orderBls.size();i++){
                String key=orderBls.get(i);
                if(!headerList.contains(key)){
                    tagValue.append("|");
                    tagValue.append(String.valueOf(data.get(key)));
                    header.append("|");
                    header.append(key);
                }
            }
        }else{
            // No template: emit every entry of the "data" node (map iteration order).
            Iterator<? extends Map.Entry<String, ?>> iterator = data.entrySet().iterator();
            while (iterator.hasNext()) {
                Map.Entry<String, ?> entry =   iterator.next();
                String key=entry.getKey();
                if(!headerList.contains(key)){
                    tagValue.append("|");
                    // Blank values become a single space so the column count stays stable.
                    tagValue.append(PrintUtil.isNotBlank(String.valueOf(entry.getValue()))?String.valueOf(entry.getValue()):" ");
                    header.append("|");
                    header.append(key);
                }
            }
        }
        String[] valueHeader=new String[2];
        valueHeader[0]=tagValue.toString();
        valueHeader[1]=header.toString();
        return valueHeader;
    }


    /**
     * Starts, for every configured contact-point id, one async consumer that
     * drains the work-order queue into temp files and one async uploader that
     * pushes finished files to FTP. Both loops run until the corresponding
     * stop flag is raised by {@code toThreadStop()}.
     */
    public void toThreadStart() {
        Map<String, LinkedBlockingQueue<String>> queueMap = this.getWorkOrderInfo();
        Map<String, LinkedBlockingQueue<Map>> fileQueueMap = this.getFtpFileQueue();
        for (Map.Entry<String, LinkedBlockingQueue<String>> entry : queueMap.entrySet()) {
            String id = entry.getKey();
            FtpRuleBean ftpRuleBean = this.getRuleList().get(id);
            logger.debug("监测工单消息队列-是否满足阈值条件....:{},{}", ftpFileStop, id);
            LinkedBlockingQueue<String> queue = entry.getValue();
            // Consumer loop: work orders -> temp file -> (on threshold) file queue.
            ThreadUtil.execAsync(() -> {
                while (!tempFileStop) {
                    consumptionKafkaInfo(id, ftpRuleBean, queue, fileQueueMap.get(id));
                }
            });
            // Uploader loop: file queue -> FTP server.
            ThreadUtil.execAsync(() -> {
                while (!ftpFileStop) {
                    TreadUpFile(id, ftpRuleBean);
                }
            });
        }
    }


    /**
     * Sums today's work-order counters recorded in redis for the given
     * contact id / activity id pair; falls back to {@code maxLine} when no
     * counters have been recorded yet.
     *
     * @param cid  contact-point id
     * @param reid activity id (activityRemarks)
     * @return total recorded count, or maxLine when the total is zero
     */
    private int getF4Sum(String cid, String reid) {
        String synTime = df.format(new Date());
        Map<Object, Object> counters = RedisUtil.hmget("workorder:count:" + cid + "_" + reid + "_" + synTime);
        int total = 0;
        for (Map.Entry<Object, Object> e : counters.entrySet()) {
            total += Integer.parseInt(String.valueOf(e.getValue()));
        }
        return total == 0 ? maxLine : total;
    }
    /**
     * Reads today's recorded start time for the given contact id / activity id
     * from redis; when nothing is recorded, returns today's date (yyyyMMdd).
     *
     * @param cid  contact-point id
     * @param reid activity id (activityRemarks)
     * @return the first recorded status value, or today's date string
     */
    private String getF4Time(String cid, String reid) {
        String synTime = df.format(new Date());
        Map<Object, Object> statusMap = RedisUtil.hmget("workorder:status:" + cid + "_" + reid + "_" + synTime);
        for (Object value : statusMap.values()) {
            // First entry wins, matching the original keySet()/get() lookup.
            return value.toString();
        }
        return synTime;
    }
    /**
     * Reads today's recorded activity type for the given contact id /
     * activity id from redis; defaults to "0" when nothing is recorded.
     *
     * @param cid  contact-point id
     * @param reid activity id (activityRemarks)
     * @return the first recorded type value, or "0"
     */
    private String getAidType(String cid, String reid) {
        Map<Object, Object> typeMap = RedisUtil.hmget("workorder:type:" + cid + "_" + reid + "_" + df.format(new Date()));
        for (Object value : typeMap.values()) {
            // First entry wins, matching the original keySet()/get() lookup.
            return value.toString();
        }
        return "0";
    }

    /**
     * Takes one work order off the queue, appends it to the per-contact temp
     * file and, once the file reaches the line threshold from redis, moves it
     * into the FTP staging directory and enqueues it for upload.
     *
     * @param id          contact-point id this queue belongs to
     * @param ftpRuleBean FTP rule (paths/credentials) for this contact point
     * @param queue       blocking queue of raw work-order messages
     * @param fileQueue   queue of finished files awaiting upload
     */
    private void consumptionKafkaInfo(String id, FtpRuleBean ftpRuleBean, LinkedBlockingQueue<String> queue, LinkedBlockingQueue<Map> fileQueue) {
        try {
            String strDl = queue.take();
            String[] strDls = parseJsonToFTPString(strDl);
            String input = strDls[0];
            String header = strDls[1];
            logger.debug( "工单消息队列：{},{}", input, header);
            String reid = input.split("\\|")[1];
            String filePath = tempFileMap.get(id + "|" + reid);
            if (StrUtil.isBlank(filePath)) {
                // First record for this contact/activity: create a new temp file and
                // write the header line (fields 2..n; channelCode/activityRemarks dropped).
                this.setStartTime(new Date());
                filePath = this.getLocalFile(input, id);
                tempFileMap.put(id + "|" + reid, filePath);
                FileUtil.createFile(filePath);
                File file = new File(filePath);
                String[] lineInfo = header.split("\\|");
                List<String> lineInfoLs = new ArrayList<>();
                for (int k = 2; k < lineInfo.length; k++) {
                    lineInfoLs.add(lineInfo[k]);
                }
                FileUtil.saveFile(String.join("|", lineInfoLs), file);
                logger.info( "文件生成中：{},{},{},{}", id, reid, filePath, header);
            }
            // Append the data line (fields 2..n) to the temp file.
            File file = new File(filePath);
            String[] lineInfo = input.split("\\|");
            List<String> lineInfoLs = new ArrayList<>();
            for (int k = 2; k < lineInfo.length; k++) {
                lineInfoLs.add(lineInfo[k]);
            }
            FileUtil.saveFile(String.join("|", lineInfoLs), file);
            int count = FileUtil.getFileLineCount(file);
            // count - 1: the header line does not count towards the threshold.
            if ((count - 1) >= getF4Sum(id, reid)) {
                logger.debug( "文件行数：{},{}", filePath, count);
                logger.debug("满足阈值-开始移动文件:{}", filePath);
                if (FileUtil.moveFile(ftpRuleBean.getFilePath(), filePath)) {
                    String ftpFileName = filePath.substring(filePath.lastIndexOf(File.separator), filePath.length()).replace(".temp", ".txt");
                    String ftpFilePath = Paths.get(ftpRuleBean.getFilePath(), ftpFileName).toString();
                    logger.debug("文件行移动成功:{}", ftpFilePath);
                    Map<String, String> fileMap = new ConcurrentHashMap<>();
                    fileMap.put(id + "|" + reid, ftpFilePath);
                    fileQueue.put(fileMap);
                    // Reset the slot so the next record starts a fresh temp file.
                    logger.debug( "重置临时文件:{}", (id + "|" + reid));
                    tempFileMap.put(id + "|" + reid, "");
                } else {
                    logger.debug( "移动文件失败"  );
                }
            }
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status so the worker loop can observe it
            // and shut down cleanly instead of spinning.
            Thread.currentThread().interrupt();
            logger.error(e, SystemMessageId.CMP0705, Thread.currentThread().getName());
        } catch (Exception e) {
            logger.error(e);
        }
    }

    /**
     * Timeout-driven flush: moves every non-empty temp file into the FTP
     * staging directory, enqueues it for upload and resets its slot. Intended
     * for files that never reached the line threshold.
     */
    private void TreadMovFile() {
        try {
            if (tempFileMap != null && tempFileMap.size() > 0) {
                Map<String, LinkedBlockingQueue<Map>> fileQueueMap = this.getFtpFileQueue();
                Iterator<Map.Entry<String, String>> iteratorFile = tempFileMap.entrySet().iterator();
                while (iteratorFile.hasNext()) {
                    Map.Entry<String, String> entryFile = iteratorFile.next();
                    String filePathId = entryFile.getKey(); // "contactId|activityId"
                    String filePath = entryFile.getValue();
                    if (PrintUtil.isNotBlank(filePath)) {
                        String[] lineInfo = filePathId.split("\\|");
                        FtpRuleBean ftpRuleBean = this.getRuleList().get(lineInfo[0]);
                        logger.debug("等待超时-开始移动文件:{}", filePath);
                        if (FileUtil.moveFile(ftpRuleBean.getFilePath(), filePath)) {
                            String ftpFileName = filePath.substring(filePath.lastIndexOf(File.separator), filePath.length()).replace(".temp", ".txt");
                            String ftpFilePath = Paths.get(ftpRuleBean.getFilePath(), ftpFileName).toString();
                            logger.debug("文件行移动成功:{}", ftpFilePath);
                            Map<String, String> fileMap = new ConcurrentHashMap<>();
                            fileMap.put(filePathId, ftpFilePath);
                            fileQueueMap.get(lineInfo[0]).put(fileMap);
                            // Reset the slot so the next record starts a fresh temp file.
                            logger.debug( "重置临时文件:{}", filePathId);
                            tempFileMap.put(filePathId, "");
                        }
                    } else {
                        logger.debug( "等待超时-检查是否有需要移动的文件:{}", false);
                    }
                }
            } else {
                logger.debug( "等待超时-检查是否有需要移动的文件:{}", false);
            }
        } catch (InterruptedException e) {
            // Fix: restore interrupt status and log instead of printStackTrace().
            Thread.currentThread().interrupt();
            logger.error(e, "等待超时-移动文件被中断");
        } catch (Exception e) {
            // Fix: route to the class logger instead of printStackTrace().
            logger.error(e, "等待超时-移动文件失败");
        }
    }

    /**
     * Timeout-driven flush entry point; currently a no-op because the call to
     * {@link #TreadMovFile()} is commented out.
     * NOTE(review): confirm whether the flush is intentionally disabled.
     */
    public void movFile(){

        //TreadMovFile();
    }

    /**
     * Blocks on the per-contact file queue, takes the next finished file and
     * uploads it to the contact point's FTP server, then logs a summary line
     * with the redis-recorded start time, count and type.
     *
     * @param id          contact-point id whose file queue is drained
     * @param ftpRuleBean FTP rule (server/credentials/dirs) for this contact point
     */
    private void TreadUpFile(String id, FtpRuleBean ftpRuleBean) {
        try {
            Map<String, LinkedBlockingQueue<Map>> fileQueueMap = this.getFtpFileQueue();
            Map<String, String> fileMap = fileQueueMap.get(id).take();
            logger.debug( "获取到待上传文件：{}", fileMap);
            for (Map.Entry<String, String> entryFile : fileMap.entrySet()) {
                String pathAndId = entryFile.getKey(); // "contactId|activityId"
                String cid = pathAndId.split("\\|")[0];
                String aid = pathAndId.split("\\|")[1];
                String ftpFilePath = entryFile.getValue();
                if (StrUtil.isNotBlank(ftpFilePath)) {
                    File ftpFile = new File(ftpFilePath);
                    boolean ftpSuccess = uploadFtpFile(id, ftpRuleBean.getFtpDir(), ftpFilePath, ftpFile.getName());
                    String nowTime = CalendarUtil.toShortTimeString(new Date(), "yyyy-MM-dd HH:mm");
                    logger.info( "开始时间{},结束时间{},周期{},文件名称{}，数据集记录数{}，文件记录数{}，是否成功{}，目的地{}", this.getF4Time(cid, aid), nowTime, this.getAidType(cid, aid), ftpFile.getName(), getF4Sum(cid, aid), FileUtil.getFileLineCount(ftpFile), ftpSuccess, ftpRuleBean.getFtpDir());
                    if (!ftpSuccess) {
                        // NOTE(review): the requeue below is commented out, so the log
                        // message overstates what happens — confirm intended behavior.
                        //fileQueueMap.get(id).put( fileMap );
                        logger.debug( "文件上传失败已加入文件队列等待下次触发：{}", fileMap);
                    }
                }
            }
        } catch (InterruptedException e) {
            // Fix: restore interrupt status and log instead of printStackTrace().
            Thread.currentThread().interrupt();
            logger.error(e, "上传文件线程被中断:{}", id);
        }
    }

    /**
     * Signals both worker loops (temp-file consumers and FTP uploaders) to
     * stop; threads started by {@code toThreadStart()} exit after their
     * current iteration. The flags are volatile, so the change is visible to
     * the async threads.
     */
    public void toThreadStop() {
        tempFileStop = true;
        ftpFileStop = true;
    }
    /**
     * Uploads a local file to the contact point's FTP server, retrying a
     * bounded number of times on failure.
     *
     * @param id             contact-point id used to look up the FTP rule
     * @param remotePath     remote target directory
     * @param filePath       local file path
     * @param targetFileName target file name
     * @return true when the upload succeeded (possibly after retries)
     */
    private boolean uploadFtpFile(String id, String remotePath, String filePath, String targetFileName ) {
        logger.debug( "开始将文件上传至FTP" );
        logger.debug("触点ID:,{}", id);
        logger.debug("远程请求指定存放目录:,{}", remotePath);
        logger.debug("本地文件路径:,{}", filePath);
        logger.debug("目标文件名称:,{}", targetFileName);
        FtpRuleBean ftpRuleBean = this.getRuleList().get(id);
        FtpUploadUtil ftpUploadUtil = new FtpUploadUtil();
        boolean isSuccess = false;
        try {
            isSuccess = ftpUploadUtil.uploadFile(ftpRuleBean.getFtpIp(), Integer.parseInt(ftpRuleBean.getFtpPort()), ftpRuleBean.getUsername(), ftpRuleBean.getPassword(), remotePath, filePath);
            logger.info( "上传文件到ftp状态:{},{}", isSuccess ? "成功" : "失败", filePath);
            // Fix: the previous while(!isSuccess) loop retried forever and could
            // spin the uploader thread on a persistent failure; cap at 3 retries.
            int retryTimes = 0;
            while (!isSuccess && retryTimes < 3) {
                isSuccess = ftpUploadUtil.uploadFile(ftpRuleBean.getFtpIp(), Integer.parseInt(ftpRuleBean.getFtpPort()), ftpRuleBean.getUsername(), ftpRuleBean.getPassword(), remotePath, filePath);
                retryTimes++;
                logger.debug( "上传文件到ftp状态:{},{}", isSuccess ? "成功" : "失败", filePath);
            }
        } catch (IOException e) {
            // Fix: route to the class logger instead of printStackTrace().
            logger.error(e, "上传文件到ftp异常:{}", filePath);
        }
        return isSuccess;
    }
    /**
     * Builds the temp-file path for one contact point:
     * {tempPath}/{prefix}_{activityId}_custgrouplist_data_{timestamp}.temp
     *
     * @param input pipe-delimited work-order line (field 1 = activity id)
     * @param id    contact-point id used to look up the FTP rule
     * @return absolute temp-file path as a string
     */
    private String getLocalFile(String input, String id){
        logger.debug( "getLocalFile-根据触点ID获取FTP规则" );
        FtpRuleBean rule = this.getRuleList().get(id);
        String activityId = input.split("\\|")[1];
        // Fall back to "755" when no filename prefix is configured for the rule.
        String prefix = PrintUtil.isNotBlank(rule.getPrefix1()) ? rule.getPrefix1() : "755";
        String stamp = new SimpleDateFormat( "yyyyMMddHHmmss" ).format( new Date() );
        Path path = Paths.get(rule.getTempPath(), prefix + "_" + activityId + "_custgrouplist_data_" + stamp + ".temp");
        logger.debug("生成文件名:{}" , path != null ? path.getFileName() : "获取文件名称失败");
        return path.toString();
    }


    /**
     * Parses a raw kafka work-order message and routes it onto the blocking
     * queue of the contact point named in its first field (channelCode).
     *
     * @param input raw JSON work-order message from kafka
     */
    public void addQueueLine(String input){
        String[] strs = parseJsonToFTPString(input);
        String str = strs[0];
        String[] lines = str.split("\\|");
        String id = lines[0];
        LinkedBlockingQueue queue = this.getWorkOrderInfo().get(id);
        if (queue == null) {
            // Fix: an unconfigured contact id previously caused a NullPointerException
            // on queue.put(); skip the message and log instead.
            logger.debug("触点ID未配置传输规则,忽略消息:{}", id);
            return;
        }
        try {
            if (StrUtil.isNotBlank(str)) {
                queue.put(input);
            }
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status so the caller can observe it.
            Thread.currentThread().interrupt();
            logger.error(e, SystemMessageId.CMP0705, Thread.currentThread().getName());
        }
    }
   /* private List<String> getCols(int num){
        List<String> result = new ArrayList<>();
        
        for(int i=0; i< num; i++){
            result.add(getCol(i));
        }

        return result;
    }

    private String getCol(int i){
        int x1 = i / 26;
        int x2 = i % 26;

        if(x1 == 0){
            return getChar(x2) + "";
        }else{
            return getChar(x1-1) + "" + getChar(x2) + "";
        }

    }

    private char getChar(int i){
        return (char) ('a' + i);
    }*/
}
