package com.jy.datapipeline.export.quartz;

import com.alibaba.fastjson.JSONObject;
import com.jcraft.jsch.ChannelSftp;
import com.jy.datapipeline.common.config.DBMetaDataConfig;
import com.jy.datapipeline.common.util.*;
import com.jy.datapipeline.export.dao.ExpTaskExecuteDetailLogDao;
import com.jy.datapipeline.export.dao.ExpTaskExecuteLogDao;
import com.jy.datapipeline.export.dao.repo.*;
import com.jy.datapipeline.export.entity.DataSourceInfoModel;
import com.jy.datapipeline.export.entity.DataStoreInfoModel;
import com.jy.datapipeline.export.entity.ExpRuleDetailModel;
import com.jy.datapipeline.export.entity.ExpRuleModel;
import com.jy.datapipeline.export.entity.ExpTaskExecuteDetailLogModel;
import com.jy.datapipeline.export.entity.ExpTaskModel;
import com.jy.datapipeline.export.service.DataDictionaryService;
import com.jy.datapipeline.export.task.DataFileBean;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLXML;
import java.sql.Timestamp;
import java.util.Date;
import java.util.Locale;
import java.util.concurrent.Executor;
import javax.annotation.Resource;
import javax.sql.DataSource;

import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.jetbrains.annotations.NotNull;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.TriggerKey;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.quartz.QuartzJobBean;
import org.springframework.stereotype.Component;

@Slf4j
@Component
public class JobDetailSingleShardHandler extends QuartzJobBean {
    @Autowired
    private ExpTaskExecuteDetailLogDaoRepo expTaskExecuteDetailLogDaoRepo;
    @Autowired
    private DBMetaDataConfig dbMetaDataConfig;
    @Autowired
    private DataSourceInfoDaoRepo dataSourceInfoDaoRepo;
    @Autowired
    private ExpRuleDaoRepo expRuleDaoRepo;
    //@Value("${config.fetchSize}")
    private int fetchSize;

    //@Value("${config.printOutSize}")
    private int printOutSize;

    // @Value("${config.localStorePath}")
    private String localStorePath;
    @Autowired
    private ExpRuleDetailDaoRepo expRuleDetailDaoRepo;
    @Autowired
    private ExpTaskDaoRepo expTaskDaoRepo;
    @Autowired
    private ExpTaskExecuteDetailLogDao expTaskExecuteDetailLogDao;
    @Autowired
    private DataStoreInfoDaoRepo dataStoreInfoDaoRepo;
    private DataStoreInfoModel dataStoreInfoModel;
    private DataSource dataSource;
    private ExpRuleModel expRuleModel;
    private ExpRuleDetailModel expRuleDetailModel;
    private ExpTaskModel expTaskModel;
    private DataSourceInfoModel dataSourceInfoModel;
    private ExpTaskExecuteDetailLogModel expTaskExecuteDetailLogModel;
    private int retryCount = 0;
    private String detailTaskId = "";

    private DataDictionaryService dataDictionaryService;
    @Autowired
    private Scheduler scheduler;

    /**
     * Quartz entry point for a single-shard export job.
     *
     * <p>Reads the detail-task id and tuning parameters from the job data map,
     * removes this one-shot job/trigger from the scheduler, then runs the export.
     *
     * @param context Quartz execution context supplying the job data map and scheduler
     * @throws JobExecutionException declared by the Quartz contract (not thrown here;
     *         all failures are logged and recorded in the detail log instead)
     */
    @Override
    protected void executeInternal(@NotNull JobExecutionContext context) throws JobExecutionException {
        if (scheduler == null) {
            scheduler = context.getScheduler();
        }
        JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
        // BUG FIX: the original declared a LOCAL "String detailTaskId" here, shadowing the
        // field of the same name. remoteSimpleTask() reads the field, which was still "",
        // so the trigger/job was never actually paused or deleted. Assign the field instead.
        detailTaskId = (String) jobDataMap.get("id");
        fetchSize = Integer.parseInt(
            jobDataMap.get("fetchSize") == null ? "1000" : jobDataMap.get("fetchSize").toString());
        localStorePath = (String) jobDataMap.get("localStorePath");
        printOutSize = Integer.parseInt(
            jobDataMap.get("printOutSize") == null ? "1000" : jobDataMap.get("printOutSize").toString());
        remoteSimpleTask();
        singleShardExec(detailTaskId);
    }

    /**
     * Removes this job's one-shot trigger and job definition from the scheduler so the
     * export does not fire again. Uses the {@code detailTaskId} field as both trigger
     * and job key. Failures are logged and swallowed deliberately (best effort cleanup).
     */
    public void remoteSimpleTask() {
        try {
            TriggerKey triggerKey = TriggerKey.triggerKey(detailTaskId);
            scheduler.pauseTrigger(triggerKey);           // stop the trigger
            scheduler.unscheduleJob(triggerKey);          // remove the trigger
            scheduler.deleteJob(JobKey.jobKey(detailTaskId)); // delete the job
        } catch (Exception e) {
            log.error("remoteSimpleTask error", e);
        }
    }

    /**
     * Loads all metadata for one export shard and runs it.
     *
     * <p>Each {@code @Autowired} collaborator is lazily re-fetched from the bean factory
     * when {@code null} — this job instance may be created by Quartz outside Spring's
     * control, in which case field injection has not happened. It then resolves the
     * detail-log row, source datasource, rule, task, rule detail and store config, and
     * delegates to {@link #getResult()}. All failures are logged and swallowed.
     *
     * @param detailTaskId primary key of the export-detail log row to execute
     */
    public void singleShardExec(String detailTaskId) {
        try {
            if (expTaskExecuteDetailLogDaoRepo == null) {
                expTaskExecuteDetailLogDaoRepo = BeanFactoryUtil.getBean(ExpTaskExecuteDetailLogDaoRepo.class);
            }
            if (dataSourceInfoDaoRepo == null) {
                dataSourceInfoDaoRepo = BeanFactoryUtil.getBean(DataSourceInfoDaoRepo.class);
            }
            if (expRuleDaoRepo == null) {
                expRuleDaoRepo = BeanFactoryUtil.getBean(ExpRuleDaoRepo.class);
            }
            if (expTaskDaoRepo == null) {
                expTaskDaoRepo = BeanFactoryUtil.getBean(ExpTaskDaoRepo.class);
            }
            if (expRuleDetailDaoRepo == null) {
                expRuleDetailDaoRepo = BeanFactoryUtil.getBean(ExpRuleDetailDaoRepo.class);
            }
            if (dataStoreInfoDaoRepo == null) {
                dataStoreInfoDaoRepo = BeanFactoryUtil.getBean(DataStoreInfoDaoRepo.class);
            }
            if (expTaskExecuteDetailLogDao == null) {
                expTaskExecuteDetailLogDao = BeanFactoryUtil.getBean(ExpTaskExecuteDetailLogDao.class);
            }
            if (dbMetaDataConfig == null) {
                dbMetaDataConfig = BeanFactoryUtil.getBean(DBMetaDataConfig.class);
            }
            if (dataDictionaryService == null) {
                dataDictionaryService = BeanFactoryUtil.getBean(DataDictionaryService.class);
            }
            expTaskExecuteDetailLogModel = expTaskExecuteDetailLogDaoRepo.findByTaskExecuteDetailId(detailTaskId);
            if (expTaskExecuteDetailLogModel != null) {
                dataSourceInfoModel =
                    dataSourceInfoDaoRepo.findByDataSourceId(expTaskExecuteDetailLogModel.getDataSourceId());
                // Reuse the cached DataSource for this source id, creating and caching it on miss.
                dataSource = DBMetaDataConfig.dataSourceMap.get(expTaskExecuteDetailLogModel.getDataSourceId());
                if (dataSource == null) {
                    DBMetaDataConfig.dataSourceMap.put(
                        expTaskExecuteDetailLogModel.getDataSourceId(),
                        dbMetaDataConfig.addPassDataSource(dataSourceInfoModel));
                }

                expRuleModel = expRuleDaoRepo.findByRuleId(expTaskExecuteDetailLogModel.getRuleId());
                expTaskModel = expTaskDaoRepo.findByTaskId(expTaskExecuteDetailLogModel.getTaskId());
                expRuleDetailModel =
                    expRuleDetailDaoRepo.findByRuleDetailId(expTaskExecuteDetailLogModel.getRuleDetailId());
                // BUG FIX: the original issued this identical findByStoreId query twice.
                dataStoreInfoModel = dataStoreInfoDaoRepo.findByStoreId(expTaskExecuteDetailLogModel.getStoreId());
                this.getResult();
            }
        } catch (Exception e) {
            log.error("singleShardExec error", e);
        }
    }

    /**
     * Opens a connection on the shard's datasource with auto-commit disabled
     * (required by some drivers, e.g. PostgreSQL, for cursor-based fetching).
     *
     * @return a connection the caller must close
     * @throws SQLException if the connection cannot be obtained
     */
    private Connection getConnection() throws SQLException {
        Connection connection = dataSource.getConnection();
        connection.setAutoCommit(false);
        return connection;
    }

    /**
     * Runs the shard's export SQL, streams the result set into a local ".dat" file
     * (one row per line, columns joined by the rule's separator), then updates the
     * detail-log status and uploads the file via SFTP.
     *
     * <p>On failure the detail log is marked failed and the whole method retries
     * itself up to 3 times (recursively, inside the catch — note the outer call's
     * connection stays open until the retry unwinds; preserved from the original).
     *
     * @throws IOException declared for callers; other checked exceptions are
     *         rethrown unchecked via {@code @SneakyThrows}
     */
    @SneakyThrows
    public void getResult() throws IOException {
        BufferedWriter outData = null;
        JSONObject dataFileJson = new JSONObject();
        String fileNamePre = "";
        String dataFilePath = "";
        boolean isSftp = true;
        int totalLine = 0; // the original kept two counters ("line"/"totalline") that were always equal
        Connection connection = this.getConnection();
        try {
            try (PreparedStatement preparedStatement =
                     connection.prepareStatement(expTaskExecuteDetailLogModel.getSqlStr())) {

                log.info("execute sql {} fetchSize {}", expTaskExecuteDetailLogModel.getSqlStr(), fetchSize);
                // BUG FIX: fetch size must be set BEFORE executeQuery(); the original set it
                // after the ResultSet was already open, so it had no effect on the cursor.
                preparedStatement.setFetchSize(fetchSize);

                try (ResultSet resultSet = preparedStatement.executeQuery()) {
                    // Build the file name: SOURCE_SCHEMA_TABLE_CITY[_BUSINESS]_DATATIME, upper-cased.
                    Date dataDate = RandomNumsUtil.getDataTime(expRuleDetailModel.getDataUnit(),
                        expRuleDetailModel.getDataPeriod());
                    String dataTime = RandomNumsUtil.getDataTimeForFileName(dataDate,
                        expRuleDetailModel.getDataUnit());
                    String cityCode = dataDictionaryService.findById(expTaskModel.getCityCodeId());
                    fileNamePre = dataSourceInfoModel.getDataSourceName() + "_"
                        + expRuleDetailModel.getSchemaName() + "_"
                        + expTaskExecuteDetailLogModel.getRuleTbl() + "_" + cityCode + "_";
                    if (expTaskModel.getBusinessName() != null && !expTaskModel.getBusinessName().equals("")) {
                        fileNamePre = fileNamePre + expTaskModel.getBusinessName() + "_";
                    }
                    fileNamePre = fileNamePre + dataTime;
                    fileNamePre = fileNamePre.toUpperCase(Locale.ROOT);

                    File folder = new File(localStorePath);
                    if (!folder.exists() && !folder.isDirectory()) {
                        folder.mkdirs();
                    }
                    String dataFileName =
                        fileNamePre + "_" + expTaskExecuteDetailLogModel.getDataFileSuffix() + ".dat";
                    dataFilePath = localStorePath + "/" + dataFileName;
                    File dataFile = new File(dataFilePath);
                    if (dataFile.exists()) {
                        dataFile.delete(); // overwrite any leftover file from a previous run
                        dataFile = new File(dataFilePath);
                    }
                    // NOTE(review): FileWriter uses the platform default charset — confirm
                    // downstream consumers expect that before pinning UTF-8.
                    outData = new BufferedWriter(new FileWriter(dataFile));
                    log.info("start write resultSet ");

                    // Column count is invariant per query; the original re-read it every row.
                    int columnCount = resultSet.getMetaData().getColumnCount();
                    while (resultSet.next()) {
                        totalLine++;
                        for (int i = 1; i <= columnCount; i++) {
                            writeValue(outData, resultSet.getObject(i));
                            if (i < columnCount) {
                                outData.write(expRuleModel.getColSeparator());
                            }
                        }
                        outData.write("\n");
                        if (totalLine % printOutSize == 0) {
                            log.info(" table [" + expTaskExecuteDetailLogModel.getRuleTbl() + "] logId ["
                                + expTaskExecuteDetailLogModel.getTaskExecuteDetailId()
                                + "] current line [" + totalLine + "]");
                        }
                        if (totalLine % fetchSize == 0) {
                            outData.flush();
                        }
                    }
                    outData.flush();

                    dataFileJson.put("fileName", dataFileName);
                    dataFileJson.put("fileSize", dataFile.length());
                    dataFileJson.put("fileRow", (long) totalLine);
                    log.info(" table [" + expTaskExecuteDetailLogModel.getRuleTbl()
                        + "] export end total [" + totalLine + "] ");
                }
            }
        } catch (Exception e) {
            log.error("getResult error", e);

            // Mark both the aggregate execute log and the detail log as failed.
            ExpTaskExecuteLogDao expTaskExecuteLogDao = BeanFactoryUtil.getBean(ExpTaskExecuteLogDao.class);
            String taskExecuteDetailId = expTaskExecuteDetailLogModel.getTaskExecuteDetailId();
            String executeLogId = expTaskExecuteDetailLogDao.findExecuteLogIdByExeDetailId(taskExecuteDetailId);
            expTaskExecuteLogDao.updateExeuceStatus(executeLogId, 3, 0, "", "");

            expTaskExecuteDetailLogModel.setExpStatus(-1); // export failed
            expTaskExecuteDetailLogModel.setSftpeEndtime(new Date());
            expTaskExecuteDetailLogModel.setErrorMsg(
                "{sql:" + expTaskExecuteDetailLogModel.getRuleTbl() + ",error_msg:" + e.getMessage() + "}");
            expTaskExecuteDetailLogDao.updateExpStatus(expTaskExecuteDetailLogModel);
            isSftp = false; // the successful inner retry (if any) performs its own upload
            retryCount++;
            if (retryCount <= 3) {
                log.info("start retry count : {}", retryCount);
                getResult();
            }
        } finally {
            if (outData != null) {
                outData.close();
            }
            connection.close();
        }

        if (isSftp) {
            expTaskExecuteDetailLogModel.setDataFileInfo(dataFileJson.toJSONString());
            expTaskExecuteDetailLogModel.setDataFileNamePrefix(fileNamePre);
            expTaskExecuteDetailLogModel.setDataFilePath(dataFilePath);
            expTaskExecuteDetailLogModel.setExpStatus(2); // export finished
            expTaskExecuteDetailLogModel.setExportEndTime(new Date());
            expTaskExecuteDetailLogModel.setDataFileCount(totalLine);
            this.expTaskExecuteDetailLogDao.updateExpStatus(expTaskExecuteDetailLogModel);

            expTaskExecuteDetailLogModel.setExpStatus(3); // uploading
            expTaskExecuteDetailLogModel.setSftpStartTime(new Date());
            this.expTaskExecuteDetailLogDao.updateExpStatus(expTaskExecuteDetailLogModel);

            this.uploadBySftp(dataFilePath, expRuleModel, expTaskExecuteDetailLogModel);

            expTaskExecuteDetailLogModel.setExpStatus(4); // upload finished
            expTaskExecuteDetailLogModel.setSftpeEndtime(new Date());
            this.expTaskExecuteDetailLogDao.updateExpStatus(expTaskExecuteDetailLogModel);
        }
    }

    /**
     * Writes one column value to the data file, normalising by JDBC type:
     * timestamps/dates are formatted as {@code yyyy-MM-dd HH:mm:ss}, LOB/XML types are
     * materialised to strings, other non-null values are stringified with embedded
     * newlines stripped. {@code null} writes nothing.
     */
    private void writeValue(BufferedWriter out, Object value) throws SQLException, IOException {
        if (value instanceof Timestamp) {
            Timestamp ts = (Timestamp) value;
            out.write(DateUtils.formatDate(new Date(ts.getTime()), "yyyy-MM-dd HH:mm:ss"));
        } else if (value instanceof java.sql.Date) {
            out.write(DateUtils.formatDate((java.sql.Date) value, "yyyy-MM-dd HH:mm:ss"));
        } else if (value instanceof Clob) {
            out.write(clobToString((Clob) value));
        } else if (value instanceof Blob) {
            out.write(blobToString((Blob) value));
        } else if (value instanceof SQLXML) {
            out.write(xmlToString((SQLXML) value));
        } else if (value != null) {
            // Strip newlines so a single record never spans multiple physical lines.
            out.write(String.valueOf(value).replace("\n", "").replace("\r", ""));
        }
    }

    /**
     * Uploads one exported file to the configured SFTP store.
     *
     * <p>On failure the detail log is marked with status -3 (upload failed).
     * The channel is always released.
     *
     * @param file                         absolute path of the local file to upload
     * @param expRuleModel                 rule supplying the isFile flag for the upload
     * @param expTaskExecuteDetailLogModel detail-log row updated on failure
     */
    public void uploadBySftp(String file, ExpRuleModel expRuleModel,
        ExpTaskExecuteDetailLogModel expTaskExecuteDetailLogModel) {
        ChannelSftp channelSftp = null;
        try {
            log.info("start sftp connection");
            channelSftp = FtpUtil.connectBySftp(dataStoreInfoModel.getHost(),
                dataStoreInfoModel.getUserName(),
                TEAUtil.decode(dataStoreInfoModel.getPassword()));
            log.info("end sftp connection");

            FtpUtil.uploadFileBySftp(file, dataStoreInfoModel.getRemotePath(),
                channelSftp, expRuleModel.getIsFile());
        } catch (Exception e) {
            log.error("uploadBySftp error ", e);
            expTaskExecuteDetailLogModel.setErrorMsg(e.getMessage());
            expTaskExecuteDetailLogModel.setExpStatus(-3); // upload failed
            expTaskExecuteDetailLogModel.setSftpeEndtime(new Date());
            expTaskExecuteDetailLogDao.updateExpStatus(expTaskExecuteDetailLogModel);
        } finally {
            // BUG FIX: disconnect moved to finally so the channel is released even if
            // uploadFileBySftp throws; the original duplicated the call in both branches.
            FtpUtil.disconnect(channelSftp);
        }
    }

    /** Reads a CLOB's character stream fully into a String. */
    private static String clobToString(Clob clob) throws SQLException, IOException {
        StringBuilder sb = new StringBuilder();
        try (Reader reader = clob.getCharacterStream()) {
            char[] buffer = new char[1024];
            int charsRead;
            while ((charsRead = reader.read(buffer)) != -1) {
                sb.append(buffer, 0, charsRead);
            }
        }
        return sb.toString();
    }

    /**
     * Materialises a BLOB as a String.
     * NOTE(review): decodes with the platform default charset — confirm the stored
     * bytes are text in that encoding before relying on the output.
     */
    private static String blobToString(Blob blob) throws SQLException, IOException {
        byte[] bytes = blob.getBytes(1, (int) blob.length());
        return new String(bytes);
    }

    /** Returns the SQLXML value's string form via the driver. */
    private static String xmlToString(SQLXML xml) throws SQLException {
        return xml.getString();
    }
}
