package com.huatai.bi.service;

import com.alibaba.datax.core.Engine;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.toolkit.IdWorker;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.huatai.bi.config.DataxConfig;
import com.huatai.bi.constant.KafkaConstant;
import com.huatai.bi.dto.kafka.DataSetDTO;
import com.huatai.bi.entity.*;
import com.huatai.bi.handler.dataSet.Database;
import com.huatai.bi.mapper.DataSynrecordMapper;
import com.huatai.bi.service.impl.ClickHouseJDBCService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;

import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.stream.Collectors;

/**
 * @program: itc-airport-cloud
 * @description: 数据异步执行
 * @author: songxiude
 * @create: 2024-07-05 14:06
 **/
@Service
@Slf4j
public class DataSynApiService {
//    @Autowired  (deprecated: replaced by direct use of DataSynrecordMapper below)
//    private DataSynrecordService dataSynrecordService;

    // CRUD access to data-sync execution records (exec flag, run counter, target table names).
    @Autowired
    private DataSynrecordMapper synrecordMapper;

    // Resolves datasource records, including decoded connection credentials (getDecodeData).
    @Autowired
    private DatasourceService datasourceService;

    // Source-table metadata lookup.
    @Autowired
    private TableService tableService;

    // Columns of the source tables, queried ordered by column position.
    @Autowired
    private TableColumnService tableColumnService;

    // Starts xxl-job scheduled tasks.
    @Autowired
    private XxlJobInfoService xxlJobInfoService;

    // DataX runtime settings: exec path plus writer DB type, JDBC URL and credentials.
    @Autowired
    private DataxConfig dataxConfig;

    // Executes SQL against the warehouse (queries, table create/drop).
    @Autowired
    private ClickHouseJDBCService jdbcService;

    // Directory entries mapping a data set to its current physical table name.
    @Autowired
    private DirectoryService directoryService;

    // Publishes data-set-updated events to Kafka.
    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    // Direct data-set definitions (e.g. MQTT-typed sets).
    @Autowired
    private DirectDataSetService directDataSetService;

    // Columns belonging to a data set.
    @Autowired
    private DataSetColumnService dataSetColumnService;

    // Reads column metadata for a data set straight from its source database.
    @Autowired
    private Database database;
    /**
     * Asynchronously executes one DataX data-synchronization job for the sync
     * record identified by {@code jsonObject}.
     * <p>
     * Flow: load the record and skip if a run is already in progress (a run
     * "stuck" for more than 12 hours has its flag reset instead of skipping);
     * build the reader SELECT and the writer field list from the data-set
     * columns; for incremental syncs (synType == 2, after the first run)
     * append a WHERE clause based on the newest value already present in the
     * warehouse; for repeated full syncs (synType == 1) load into a freshly
     * created table and swap it in afterwards; finally run the DataX engine,
     * publish a Kafka update event and bump the record's execution counter.
     *
     * @param jsonObject must contain "id" (sync record id) and "tableId" (source table id)
     * @param typename   data-set type forwarded in the Kafka update notification
     */
    @Async("asyncServiceExecutor")
    public void execDataSyn(JSONObject jsonObject, String typename) {
        //1. Resolve the source connection information.
        log.info("任务ID是：" + jsonObject.getLong("id"));
        DataSynrecordEntity dataSynrecord = synrecordMapper.selectById(jsonObject.getLong("id"));
        if (ObjectUtils.isEmpty(dataSynrecord)) {
            log.info("没有同步记录");
            return;
        }
        //Skip this round if the previous run has not finished yet.
        if (dataSynrecord.getIsExec().intValue() == 1) {
            //Check whether the record has been stuck in "running" for too long.
            Date updateTime = dataSynrecord.getUpdateTime();
            long time = (System.currentTimeMillis() - updateTime.getTime()) / 1000;
            log.info(dataSynrecord.getTargetTableName() + "正在同步距离上次执行时间：" + time + "秒");
            //Reset the stale exec flag after 12 hours (43200 s); otherwise give up.
            if (time > 43200) {
                dataSynrecord.setIsExec(0);
                synrecordMapper.updateById(dataSynrecord);
            } else {
                return;
            }
        }
        TableEntity tableId = tableService.getById(jsonObject.getLong("tableId"));
        log.info(tableId.getTableName() + ":开始执行数据同步");
        DatasourceEntity decodeData = datasourceService.getDecodeData(tableId.getDatasourceId());
        String datatype = decodeData.getType().toLowerCase();

        //2. Assemble the reader SQL and the writer field list.
        String sql = "SELECT ";
        StringBuilder fields = new StringBuilder();
        //For full syncs, reconcile the data-set columns with the source table first.
        if (dataSynrecord.getSynType().intValue() == 1) {
            this.checkUpdateTableColumn(tableId.getId(), dataSynrecord.getDtId());
        }
        List<DataSetColumnEntity> dcolumnEntities = dataSetColumnService.list(Wrappers.<DataSetColumnEntity>lambdaQuery()
                .eq(DataSetColumnEntity::getDataSetId, dataSynrecord.getDtId())
                .orderByAsc(DataSetColumnEntity::getColumnPosition));
        if (CollectionUtils.isEmpty(dcolumnEntities)) {
            log.info(tableId.getTableName() + ":查询不到数据集字段");
            return;
        }
        for (int i = 0; i < dcolumnEntities.size(); i++) {
            DataSetColumnEntity dataSetColumnEntity = dcolumnEntities.get(i);
            sql += dataSetColumnEntity.getColumnName();
            //NOTE(review): writer fields are joined as name""name with no comma and
            //without the outermost quotes — presumably the DataX job template
            //supplies those; confirm against the job JSON before changing.
            if (i == 0) {
                fields.append(dataSetColumnEntity.getColumnName() + "\"");
            } else if (i != 0 && i != dcolumnEntities.size() - 1) {
                fields.append("\"" + dataSetColumnEntity.getColumnName() + "\"");
            } else if (i + 1 == dcolumnEntities.size()) {
                fields.append("\"" + dataSetColumnEntity.getColumnName());
            }
            if (dcolumnEntities.size() - 1 != i) {
                sql += ",";
            }
        }
        sql += " FROM " + tableId.getTableName();
        //PostgreSQL: pin the configured schema on the reader JDBC URL.
        String readerJdbcUrl = decodeData.getJdbcUrl();
        if (datatype.equals("postgresql") && StringUtils.isNotEmpty(decodeData.getSchemaName())) {
            if (readerJdbcUrl.contains("?")) {
                readerJdbcUrl += "&currentSchema=" + decodeData.getSchemaName();
            } else {
                readerJdbcUrl += "?currentSchema=" + decodeData.getSchemaName();
            }
        }

        String mode = "insert";
        //Incremental sync after the first run: only read rows newer than the warehouse maximum.
        if (dataSynrecord.getSynType().intValue() == 2 && dataSynrecord.getExecNumber() > 0) {
            String function = this.getDBStringWriterFunction(dataxConfig.getWriterTargetDB());
            //Fetch the newest already-synced value of the incremental column from the target table.
            String whereSql = "SELECT " + function + "A." + dataSynrecord.getSynWhere() + ") as " + dataSynrecord.getSynWhere() + "  FROM (SELECT " + dataSynrecord.getSynWhere() + " FROM " + dataSynrecord.getTargetTableName() + " WHERE " + dataSynrecord.getSynWhere() + " IS NOT NULL ORDER BY id DESC LIMIT 1) A";
            log.info("数仓条件获取：{}", whereSql);

            List<LinkedHashMap<String, Object>> hashMaps = jdbcService.queryTableBySql(whereSql);
            if (!CollectionUtils.isEmpty(hashMaps)) {
                LinkedHashMap<String, Object> hashMap = hashMaps.get(0);
                String field = (String) hashMap.get(dataSynrecord.getSynWhere());
                //Read strictly newer rows. NOTE(review): the boundary is compared as a
                //string literal — confirm the incremental column orders correctly as
                //text (timestamp or zero-padded value).
                sql += " WHERE " + dataSynrecord.getSynWhere() + ">" + "'" + field + "'";
            }
        }

        String targetTableName = dataSynrecord.getTargetTableName();
        String targetWornTableName = dataSynrecord.getTargetTableName();
        //Repeated full sync: load into a brand-new table, then swap and drop the old one.
        if (dataSynrecord.getSynType().intValue() == 1 && dataSynrecord.getExecNumber() > 0) {
            String worn = targetTableName.substring(targetTableName.lastIndexOf("_"));
            String newSuf = "_" + IdWorker.getId();
            targetTableName = targetTableName.replace(worn, newSuf);
            //Create the new target table before the load.
            jdbcService.createTableSourceDS(targetTableName, dataSynrecord.getDtId(), dataSynrecord.getSynWhere());
        }

        log.info("执行查询sql:" + sql);
        String execPath = dataxConfig.getExecPath() + "/job/" + datatype + ".json";
        //3. Pass the dynamic job parameters to DataX through system properties.
        System.setProperty("datax.home", dataxConfig.getExecPath());
        System.setProperty("reader_username", decodeData.getUserName());
        System.setProperty("reader_password", decodeData.getPassword());
        System.setProperty("reader_sql", sql);
        System.setProperty("reader_jdbc", readerJdbcUrl);
        System.setProperty("writer_target_db", dataxConfig.getWriterTargetDB());
        System.setProperty("writer_jdbc", dataxConfig.getWriterJdbc());
        System.setProperty("writer_username", dataxConfig.getUsername());
        System.setProperty("writer_password", dataxConfig.getPassword());
        System.setProperty("writer_fields", fields.toString());
        System.setProperty("writer_mode", mode);
        System.setProperty("writer_table", targetTableName);
        String[] datxArgs2 = {"-job", execPath, "-mode", "standalone", "-jobid", "-1"};
        //4. Run the synchronization.
        try {
            long st = System.currentTimeMillis();
            dataSynrecord.setIsExec(1);
            synrecordMapper.updateById(dataSynrecord);
            Engine.entry(datxArgs2);
            log.info("本次" + tableId.getTableName() + "耗时（毫秒）：" + (System.currentTimeMillis() - st));
            //Run finished: clear the flag and finalize bookkeeping.
            dataSynrecord.setIsExec(0);
            if (dataSynrecord.getSynType().intValue() == 1 && dataSynrecord.getExecNumber() > 0) {
                dataSynrecord.setTargetTableName(targetTableName);
                dataSynrecord.setTargetWornTable(targetWornTableName);
                //Point the data set's directory entry at the new table.
                DirectoryEntity directoryEntity = directoryService.getOne(Wrappers.<DirectoryEntity>lambdaQuery().eq(DirectoryEntity::getDtId, dataSynrecord.getDtId()));
                directoryEntity.setTableName(targetTableName);
                directoryService.saveOrUpdate(directoryEntity);
                //Drop the superseded table.
                jdbcService.dropTable(targetWornTableName);
            }
            //Notify consumers that the data set changed (per 2024-08-20 requirement,
            //both full and incremental runs notify).
            DataSetDTO dataSetDTO = new DataSetDTO();
            dataSetDTO.setDtId(dataSynrecord.getDtId());
            dataSetDTO.setType(typename);
            kafkaTemplate.send(KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, JSON.toJSONString(dataSetDTO));
            //Bump the execution counter.
            dataSynrecord.setExecNumber(dataSynrecord.getExecNumber() + 1);
            synrecordMapper.updateById(dataSynrecord);
        } catch (Throwable e) {
            //Engine.entry may throw anything; log with full stack trace instead of printStackTrace().
            log.error(tableId.getTableName() + ":数据同步执行异常", e);
        } finally {
            //Always clear the exec flag so the next round is not blocked.
            dataSynrecord.setIsExec(0);
            synrecordMapper.updateById(dataSynrecord);
        }
    }


    /**
     * Reconciles the data-set column list with the current columns of its
     * source table: columns added to the source are inserted into the data
     * set, and data-set columns no longer present on the source are removed.
     *
     * @param tableId id of the source table
     * @param dtId    id of the data set
     * @return true when both column lists were non-empty and reconciliation ran
     */
    public boolean checkUpdateTableColumn(Long tableId, Long dtId) {
        List<TableColumnEntity> sourceColumns = tableColumnService.list(Wrappers.<TableColumnEntity>lambdaQuery()
                .eq(TableColumnEntity::getTableId, tableId)
                .orderByAsc(TableColumnEntity::getColumnPosition)
        );
        if (CollectionUtils.isEmpty(sourceColumns)) {
            return false;
        }
        List<DataSetColumnEntity> dataSetColumns = dataSetColumnService.list(
                Wrappers.<DataSetColumnEntity>lambdaQuery().eq(DataSetColumnEntity::getDataSetId, dtId));
        if (CollectionUtils.isEmpty(dataSetColumns)) {
            return false;
        }
        //Column names on each side, then the difference in both directions.
        List<String> sourceNames = sourceColumns.stream().map(TableColumnEntity::getColumnName).collect(Collectors.toList());
        List<String> dataSetNames = dataSetColumns.stream().map(DataSetColumnEntity::getColumnName).collect(Collectors.toList());
        List<String> missingInDataSet = sourceNames.stream().filter(name -> !dataSetNames.contains(name)).collect(Collectors.toList());
        List<String> removedFromSource = dataSetNames.stream().filter(name -> !sourceNames.contains(name)).collect(Collectors.toList());

        //Insert columns that exist on the source table but not yet in the data set.
        if (!CollectionUtils.isEmpty(missingInDataSet)) {
            DirectDataSetEntity dataSetEntity = directDataSetService.getById(dtId);
            List<DataSetColumnEntity> freshColumns = database.column(dataSetEntity);
            List<DataSetColumnEntity> toInsert = freshColumns.stream()
                    .filter(candidate -> missingInDataSet.contains(candidate.getColumnName()))
                    .collect(Collectors.toList());
            dataSetColumnService.saveBatch(toInsert);
        }
        //Remove data-set columns that vanished from the source table.
        if (!CollectionUtils.isEmpty(removedFromSource)) {
            dataSetColumnService.remove(Wrappers.<DataSetColumnEntity>lambdaQuery()
                    .eq(DataSetColumnEntity::getDataSetId, dtId)
                    .in(DataSetColumnEntity::getColumnName, removedFromSource));
        }
        return true;
    }


    /**
     * First-generation synchronous variant of {@link #execDataSyn}. Calling an
     * async method from another async method caused problems on the first run,
     * so this path is no longer used — kept for reference only.
     * <p>
     * Same overall flow: build reader SQL from the source-table columns, add
     * an incremental WHERE clause or create a fresh target table, run DataX,
     * then update bookkeeping and (for full re-syncs) notify Kafka.
     *
     * @param jsonObject carries "id" (sync record id) and "tableId" (source table id)
     * @deprecated superseded by {@link #execDataSyn(JSONObject, String)}
     */
    @Deprecated
    public void startDataSyn(JSONObject jsonObject) {
        //1. Resolve the source connection information.
        TableEntity tableId = tableService.getById(jsonObject.getLong("tableId"));
        log.info("任务ID是：" + jsonObject.getLong("id"));
        log.info(tableId.getTableName() + ":开始执行数据同步");
        DatasourceEntity decodeData = datasourceService.getDecodeData(tableId.getDatasourceId());
        String datatype = decodeData.getType().toLowerCase();
        //2. Assemble the reader SQL from the source-table columns.
        String sql = "SELECT ";
        List<TableColumnEntity> columnEntities = tableColumnService.list(Wrappers.<TableColumnEntity>lambdaQuery()
                .eq(TableColumnEntity::getTableId, tableId.getId())
                .orderByAsc(TableColumnEntity::getColumnPosition)
        );
        if (CollectionUtils.isEmpty(columnEntities)) {
            return;
        }
        //PostgreSQL: pin the configured schema on the reader JDBC URL.
        String readerJdbcUrl = decodeData.getJdbcUrl();
        if (datatype.equals("postgresql") && StringUtils.isNotEmpty(decodeData.getSchemaName())) {
            if (readerJdbcUrl.contains("?")) {
                readerJdbcUrl += "&currentSchema=" + decodeData.getSchemaName();
            } else {
                readerJdbcUrl += "?currentSchema=" + decodeData.getSchemaName();
            }
        }
        DataSynrecordEntity dataSynrecord = synrecordMapper.selectById(jsonObject.getLong("id"));
        if (ObjectUtils.isEmpty(dataSynrecord)) {
            log.info("没有同步记录");
            return;
        }
        //Skip when the previous run is still marked as executing.
        if (dataSynrecord.getIsExec().intValue() == 1) {
            log.info(tableId.getTableName() + ":正在同步");
            return;
        }
        String mode = "insert";
        StringBuilder fields = new StringBuilder();
        for (int i = 0; i < columnEntities.size(); i++) {
            TableColumnEntity entity = columnEntities.get(i);
            sql += entity.getColumnName();
            //NOTE(review): writer fields are joined as name""name with no comma and
            //without the outermost quotes — presumably supplied by the DataX job
            //template; confirm against the job JSON before changing.
            if (i == 0) {
                fields.append(entity.getColumnName() + "\"");
            } else if (i != 0 && i != columnEntities.size() - 1) {
                fields.append("\"" + entity.getColumnName() + "\"");
            } else if (i + 1 == columnEntities.size()) {
                fields.append("\"" + entity.getColumnName());
            }
            if (columnEntities.size() - 1 != i) {
                sql += ",";
            }
        }

        sql += " FROM " + tableId.getTableName();
        //Incremental sync after the first run: only read rows newer than the warehouse maximum.
        if (dataSynrecord.getSynType().intValue() == 2 && dataSynrecord.getExecNumber() > 0) {
            String function = this.getDBStringWriterFunction(dataxConfig.getWriterTargetDB());
            //Newest already-synced value of the incremental column in the target table.
            String whereSql = "SELECT " + function + dataSynrecord.getSynWhere() + ") as " + dataSynrecord.getSynWhere() + " FROM " + dataSynrecord.getTargetTableName() + " WHERE " + dataSynrecord.getSynWhere() + " IS NOT NULL ORDER BY " + dataSynrecord.getSynWhere() + " DESC LIMIT 1";
            List<LinkedHashMap<String, Object>> hashMaps = jdbcService.queryTableBySql(whereSql);
            if (!CollectionUtils.isEmpty(hashMaps)) {
                String readerFunction = this.getDBStringReaderFunction(datatype);
                LinkedHashMap<String, Object> hashMap = hashMaps.get(0);
                String field = (String) hashMap.get(dataSynrecord.getSynWhere());
                //Compare the stringified incremental column against the boundary value.
                sql += " WHERE " + readerFunction + dataSynrecord.getSynWhere() + ")>" + "'" + field + "' AND " + readerFunction + dataSynrecord.getSynWhere() + ")!=" + "'" + field + "'";
            }
        }

        String targetTableName = dataSynrecord.getTargetTableName();
        String targetWornTableName = dataSynrecord.getTargetTableName();
        //Repeated full sync: load into a brand-new table, then swap and drop the old one.
        if (dataSynrecord.getSynType().intValue() == 1 && dataSynrecord.getExecNumber() > 0) {
            String worn = targetTableName.substring(targetTableName.lastIndexOf("_"));
            String newSuf = "_" + IdWorker.getId();
            targetTableName = targetTableName.replace(worn, newSuf);
            //Create the new target table before the load.
            jdbcService.createTableSourceDS(targetTableName, dataSynrecord.getDtId(), dataSynrecord.getSynWhere());
        }

        log.info("执行查询sql:" + sql);
        String execPath = dataxConfig.getExecPath() + "/job/" + datatype + ".json";
        //3. Pass the dynamic job parameters to DataX through system properties.
        System.setProperty("datax.home", dataxConfig.getExecPath());
        System.setProperty("reader_username", decodeData.getUserName());
        System.setProperty("reader_password", decodeData.getPassword());
        System.setProperty("reader_sql", sql);
        System.setProperty("reader_jdbc", readerJdbcUrl);
        System.setProperty("writer_target_db", dataxConfig.getWriterTargetDB());
        System.setProperty("writer_jdbc", dataxConfig.getWriterJdbc());
        System.setProperty("writer_username", dataxConfig.getUsername());
        System.setProperty("writer_password", dataxConfig.getPassword());
        System.setProperty("writer_fields", fields.toString());
        System.setProperty("writer_mode", mode);
        System.setProperty("writer_table", targetTableName);
        String[] datxArgs2 = {"-job", execPath, "-mode", "standalone", "-jobid", "-1"};
        //4. Run the synchronization.
        try {
            long st = System.currentTimeMillis();
            dataSynrecord.setIsExec(1);
            synrecordMapper.updateById(dataSynrecord);
            Engine.entry(datxArgs2);
            log.info("本次" + tableId.getTableName() + "耗时（毫秒）：" + (System.currentTimeMillis() - st));
            //Run finished: clear the flag and finalize bookkeeping.
            dataSynrecord.setIsExec(0);
            if (dataSynrecord.getSynType().intValue() == 1 && dataSynrecord.getExecNumber() > 0) {
                dataSynrecord.setTargetTableName(targetTableName);
                dataSynrecord.setTargetWornTable(targetWornTableName);
                //Point the data set's directory entry at the new table.
                DirectoryEntity directoryEntity = directoryService.getOne(Wrappers.<DirectoryEntity>lambdaQuery().eq(DirectoryEntity::getDtId, dataSynrecord.getDtId()));
                directoryEntity.setTableName(targetTableName);
                directoryService.saveOrUpdate(directoryEntity);
                //Drop the superseded table.
                jdbcService.dropTable(targetWornTableName);
                //Notify consumers that the data set changed.
                DataSetDTO dataSetDTO = new DataSetDTO();
                dataSetDTO.setDtId(dataSynrecord.getDtId());
                kafkaTemplate.send(KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, JSON.toJSONString(dataSetDTO));
            }
            dataSynrecord.setExecNumber(dataSynrecord.getExecNumber() + 1);
            synrecordMapper.updateById(dataSynrecord);
        } catch (Throwable e) {
            //Log with full stack trace instead of printStackTrace().
            log.error(tableId.getTableName() + ":数据同步执行异常", e);
        } finally {
            //Always clear the exec flag so the next round is not blocked.
            dataSynrecord.setIsExec(0);
            synrecordMapper.updateById(dataSynrecord);
        }
    }


    /**
     * Asynchronously starts each of the given xxl-job tasks, logging the
     * start result of every one.
     *
     * @param taskIds ids of the scheduled jobs to start
     */
    @Async("asyncServiceExecutor")
    public void execXxlJobStart(List<Integer> taskIds) {
        for (Integer taskid : taskIds) {
            Boolean start = xxlJobInfoService.start(taskid);
            log.info(taskid + "任务调度启动状态:" + start);
        }
    }

    /**
     * Asynchronously starts a single xxl-job task and logs the result.
     *
     * @param taskId id of the scheduled job to start
     */
    @Async("asyncServiceExecutor")
    public void execXxlJobStartOne(Integer taskId) {
        Boolean started = xxlJobInfoService.start(taskId);
        log.info(taskId + "任务调度启动状态:" + started);
    }

    /**
     * Returns the opening fragment of the SQL function that casts a column to
     * a string for the given DataX writer plugin. ClickHouse uses
     * {@code toString(}; every other supported writer (mysql, gaussdb,
     * oracle, postgresql) and any unknown value fall back to {@code CONCAT(}.
     * The caller appends the column name and the closing parenthesis.
     *
     * @param writerDb DataX writer plugin name, e.g. "clickhousewriter"; may be null
     * @return "toString(" for ClickHouse, otherwise "CONCAT("
     */
    public String getDBStringWriterFunction(String writerDb) {
        //Only ClickHouse differs; constant-first equals also tolerates null.
        return "clickhousewriter".equals(writerDb) ? "toString(" : "CONCAT(";
    }

    /**
     * Returns the opening fragment of the SQL function that casts a column to
     * a string for the given reader database type. ClickHouse uses
     * {@code toString(}; every other supported type (mysql, gaussdb, oracle,
     * postgresql) and any unknown value fall back to {@code CONCAT(}. The
     * caller appends the column name and the closing parenthesis.
     *
     * @param readerDb lower-cased datasource type, e.g. "clickhouse"; may be null
     * @return "toString(" for ClickHouse, otherwise "CONCAT("
     */
    public String getDBStringReaderFunction(String readerDb) {
        //Only ClickHouse differs; constant-first equals also tolerates null.
        return "clickhouse".equals(readerDb) ? "toString(" : "CONCAT(";
    }

    /**
     * Returns a short label for the runtime type of a value: one of
     * "String", "Integer", "Long", "Double", "Boolean" for the common
     * wrapper types, or the fully qualified class name otherwise.
     *
     * @param obj value to inspect (must be non-null)
     * @return the short type label or the fully qualified class name
     */
    public String getTypeName(Object obj) {
        if (obj instanceof Boolean) {
            return "Boolean";
        }
        if (obj instanceof Double) {
            return "Double";
        }
        if (obj instanceof Long) {
            return "Long";
        }
        if (obj instanceof Integer) {
            return "Integer";
        }
        if (obj instanceof String) {
            return "String";
        }
        return obj.getClass().getName();
    }

    /**
     * Publishes a data-set-updated Kafka event for every MQTT-typed data set.
     * Does nothing when no MQTT data sets exist.
     */
    public void execMqttDataUpdate() {
        List<DirectDataSetEntity> mqttSets = directDataSetService.list(
                Wrappers.<DirectDataSetEntity>lambdaQuery().eq(DirectDataSetEntity::getType, "MQTT"));
        if (CollectionUtils.isEmpty(mqttSets)) {
            return;
        }
        for (DirectDataSetEntity direct : mqttSets) {
            //Notify downstream consumers that this data set has fresh data.
            DataSetDTO dataSetDTO = new DataSetDTO();
            dataSetDTO.setDtId(direct.getId());
            kafkaTemplate.send(KafkaConstant.KAFKA_TOPIC_DATA_SET_PROCESS, JSON.toJSONString(dataSetDTO));
        }
    }
}
