package com.hex.ds.hdtp.core.app.data.transfer.handler.impl;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.StrUtil;
import com.hex.ds.hdtp.core.app.common.entity.BasicContrastInfo;
import com.hex.ds.hdtp.core.app.common.util.JaveShellUtil;
import com.hex.ds.hdtp.core.app.data.configuration.dto.requestDto.TableConvertRequestDto;
import com.hex.ds.hdtp.core.app.data.configuration.dto.response.TableConvertResponseDto;
import com.hex.ds.hdtp.core.app.data.configuration.service.ITableConvertService;
import com.hex.ds.hdtp.core.app.data.contrast.service.IContrastTableStructureService;
import com.hex.ds.hdtp.core.app.data.table.dto.response.TableTransferFieldMapResponseDto;
import com.hex.ds.hdtp.core.app.data.transfer.converter.IDataTransferLogConverter;
import com.hex.ds.hdtp.core.app.data.transfer.converter.IDataTransferSqlStructureConverter;
import com.hex.ds.hdtp.core.app.data.transfer.dto.DataTransferSqlStructureDto;
import com.hex.ds.hdtp.core.app.data.transfer.dto.request.DataTransferLogRequestDto;
import com.hex.ds.hdtp.core.app.data.transfer.dto.response.DataTransferListResponseDto;
import com.hex.ds.hdtp.core.app.data.transfer.handler.IDataTransferHandlerFlinkService;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferListService;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferLogService;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferTaskService;
import com.hex.ds.hdtp.core.app.metadata.service.ISourceInfoService;
import com.hex.ds.hdtp.core.inf.data.contrast.service.impl.DataInfoSelectHandlerContext;
import com.hex.ds.hdtp.core.inf.metadata.po.SourceInfoPo;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Template base class for Flink-based data transfer handlers: builds a Flink SQL
 * script (header settings + source DDL + target DDL + INSERT), hands it to a
 * shell script for execution, and records the outcome per business date.
 * Subclasses supply the source/target CREATE TABLE statements.
 * NOTE(review): {@code @Service} on an abstract class registers no bean itself;
 * concrete subclasses are the actual Spring beans.
 */
@Slf4j
@Service
@RequiredArgsConstructor(onConstructor_ = {@Lazy, @Autowired})
public abstract class AbstractDataTransferHandlerFlinkService implements IDataTransferHandlerFlinkService {
    // Task-level bookkeeping: success/fail counters and overall status.
    private final IDataTransferTaskService dataTransferTaskService;

    private final IDataTransferListService dataTransferListService;

    // Per-business-date transfer log records.
    private final IDataTransferLogService dataTransferLogService;

    private final IDataTransferLogConverter dataTransferLogConverter;

    // Source-type -> Flink field-type conversion rules.
    private final ITableConvertService tableConvertService;

    private final IDataTransferSqlStructureConverter dataTransferSqlStructureConverter;

    // Pre-transfer structure checks (table existence, field mapping).
    private final IContrastTableStructureService contrastTableStructureService;

    // Dispatches row-count queries by source type (used for the emptiness check).
    private final DataInfoSelectHandlerContext dataInfoSelectHandlerContext;

    private final JaveShellUtil javaShellUtil;

    protected final ISourceInfoService sourceInfoService;

    // Line terminator used in the generated Flink SQL files.
    protected static final String LINEFEED = "\r\n";

    // Prefixes for the Flink-side aliases of the source/target tables.
    protected final static String FLINK_SOURCE_PREFIX = "SOURCE_";

    protected final static String FLINK_TARGET_PREFIX = "TARGET_";

    // Directory the generated Flink SQL files are written under.
    @Value("${data.transfer.sql-save-path}")
    private String sqlSavePath;

    // Shell script that submits a SQL file to Flink.
    @Value("${data.transfer.script-path}")
    private String scriptPath;

    // Selects javaShellUtil.executeDataShell over JaveShellUtil.ExecCommand —
    // presumably remote vs. local execution; TODO confirm against JaveShellUtil.
    @Value("${servers.isUse}")
    private boolean isUse;

    /**
     * Executes the data transfer asynchronously, once per business date.
     * Runs the optional pre-analysis first; for each bizDate a log record is
     * created, and the transfer is executed only when analysis passed (or was
     * skipped). Finally the task's counters and status are updated.
     *
     * @param dataTransferSqlStructureDto transfer definition (tables, dates, mapping)
     * @param isDataAnalysis              whether to run pre-transfer analysis
     */
    @Override
    @Async("dataTransferAsyncTaskExecutor")
    public void dataFlinkTransfer(DataTransferSqlStructureDto dataTransferSqlStructureDto, boolean isDataAnalysis) {
        boolean dataTransferSuccess = true;
        String dataAnalysisResult = dataAnalysis(dataTransferSqlStructureDto, isDataAnalysis);
        for (DateTime dateTime : dataTransferSqlStructureDto.getBizDates()) {
            String bizDate = DateUtil.format(dateTime, "yyyyMMdd");
            String dataTransferLogId = addDataTransferLog(dataTransferSqlStructureDto, bizDate, dataAnalysisResult, isDataAnalysis);
            if (!isDataAnalysis || StrUtil.isBlank(dataAnalysisResult)) {
                // Accumulate with &= so a single failed bizDate marks the whole task
                // failed. Previously the flag was overwritten each iteration, so a
                // failure followed by a success was reported as overall success.
                dataTransferSuccess &= doDataTransfer(dataTransferLogId, bizDate, dataTransferSqlStructureDto);
            } else {
                dataTransferSuccess = false;
            }
        }
        updateDataTransferTask(dataTransferSqlStructureDto, dataTransferSuccess);
    }

    /**
     * Updates the transfer task's success/fail counters and overall status after
     * all business dates have been processed.
     *
     * @param dataTransferSqlStructureDto transfer definition carrying the task PK
     * @param dataTransferSuccess         overall outcome across all bizDates
     */
    private void updateDataTransferTask(DataTransferSqlStructureDto dataTransferSqlStructureDto, boolean dataTransferSuccess) {
        if (dataTransferSuccess) {
            dataTransferTaskService.modifySuccessNum(dataTransferSqlStructureDto.getDataTransferTaskPkId());
            // Constant-first comparison avoids an NPE when the source database type
            // is unset. NOTE(review): the gate checks the SOURCE database type but
            // refreshImpalaMetadata acts on the TARGET connection — confirm intent.
            if ("IMPALA".equalsIgnoreCase(dataTransferSqlStructureDto.getSourceDatabaseType())) {
                refreshImpalaMetadata(dataTransferSqlStructureDto);
            }
        } else {
            dataTransferTaskService.modifyFailNum(dataTransferSqlStructureDto.getDataTransferTaskPkId());
        }
        dataTransferTaskService.modifyStatus(dataTransferSqlStructureDto.getDataTransferTaskPkId());
    }


    /**
     * Pre-transfer analysis: verifies that the source/target tables exist, that
     * the field mapping between them is valid, and that the source table holds
     * data.
     *
     * @return a failure description (Chinese, user-facing), or {@code null} when
     *         analysis is skipped or every check passes
     */
    protected String dataAnalysis(DataTransferSqlStructureDto dataTransferSqlStructureDto, boolean isDataAnalysis) {
        if (!isDataAnalysis) {
            return null;
        }
        SourceInfoPo sourceSource = sourceInfoService.queryByPkId(dataTransferSqlStructureDto.getSourceConnPkId());
        SourceInfoPo targetSource = sourceInfoService.queryByPkId(dataTransferSqlStructureDto.getTargetConnPkId());
        BasicContrastInfo contrastInfo = dataTransferSqlStructureConverter
                .toBasicContrastInfo(dataTransferSqlStructureDto, sourceSource, targetSource);
        // Check 1: both tables must exist.
        if (!contrastTableStructureService.contrastCreateTable(contrastInfo).getResult()) {
            return "源表或目标表不存在";
        }
        // Check 2: the configured field mapping must be consistent.
        if (!contrastTableStructureService
                .contrastFieldNameWithMap(contrastInfo, dataTransferSqlStructureDto.getDataTransferListPkId())
                .getResult()) {
            return "源表和目标表字段映射存在问题";
        }
        // Check 3: the source table must contain at least one row.
        if (!isSourceTableEmpty(sourceSource, dataTransferSqlStructureDto)) {
            return "源表数据为空，或源表数据量分析异常";
        }
        return null;
    }

    /**
     * Checks whether the source table contains any rows.
     * NOTE(review): despite its name, this returns {@code true} when the table
     * HAS data, and {@code false} when it is empty or the count query fails.
     */
    private boolean isSourceTableEmpty(SourceInfoPo sourceSource, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        try {
            Long rowCount = dataInfoSelectHandlerContext
                    .getDataInfoService(sourceSource.getSourceType())
                    .selectIsHaveData(sourceSource.getSourceCode(),
                            dataTransferSqlStructureDto.getSourceDatabase(),
                            dataTransferSqlStructureDto.getSourceTable());
            return rowCount > 0;
        } catch (Exception e) {
            // A failed count is treated the same as an empty table.
            log.error("数据迁移-数据分析-源表数据为空数据分析异常：{}", e.getMessage(), e);
            return false;
        }
    }

    /**
     * Inserts a transfer-log record for one business date. When pre-analysis
     * already produced a failure message, the record is created as FAIL with an
     * end time set immediately.
     *
     * @return the primary key of the newly created log record
     */
    protected String addDataTransferLog(DataTransferSqlStructureDto dataTransferSqlStructureDto, String bizDate,
                                        String dataAnalysisResult, boolean isDataAnalysis) {
        DataTransferListResponseDto transferList =
                dataTransferListService.queryById(dataTransferSqlStructureDto.getDataTransferListPkId());
        DataTransferLogRequestDto logDto = dataTransferLogConverter.toDto(transferList, bizDate, dataAnalysisResult);
        boolean analysisFailed = isDataAnalysis && StrUtil.isNotBlank(dataAnalysisResult);
        if (analysisFailed) {
            logDto.setStatus("FAIL").setEndTime(DateUtil.now());
        }
        return dataTransferLogService.add(logDto);
    }

    /**
     * Executes one transfer: generates the Flink SQL file, runs the transfer
     * shell, and records the outcome in the transfer log (also on failure, via
     * the finally block).
     *
     * @return {@code true} when the shell exit code is "0"
     */
    private boolean doDataTransfer(String dataTransferLogId, String bizDate, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        Map<String, String> resultMap = new HashMap<>();
        try {
            String dataTransferSqlFilePath = createDataTransferSqlFile(bizDate, dataTransferSqlStructureDto);
            resultMap = toDataTransfer(dataTransferSqlFilePath);
        } catch (Exception e) {
            log.error("【{}】数据同步失败: {}", dataTransferSqlStructureDto.getDataTransferSqlFileName(), e.getMessage(), e);
            // Use the shared constants instead of string literals so the keys are
            // guaranteed to match the ones read below and in updateDataTransferLog.
            resultMap.put(JaveShellUtil.SHELL_RESULT, "1");
            resultMap.put(JaveShellUtil.SHELL_ERR_RESULT, e.getMessage());
        } finally {
            updateDataTransferLog(dataTransferLogId, resultMap);
        }
        return "0".equals(resultMap.get(JaveShellUtil.SHELL_RESULT));
    }

    /**
     * Runs the transfer script against the generated SQL file. The execution
     * path is selected by the {@code servers.isUse} property — presumably
     * remote vs. local shell execution; confirm against JaveShellUtil.
     *
     * @return result map keyed by JaveShellUtil.SHELL_RESULT / SHELL_ERR_RESULT
     */
    private Map toDataTransfer(String dataTransferSqlFilePath) {
        String shell = StrUtil.format("sh {} {}", scriptPath, dataTransferSqlFilePath);
        if (isUse) {
            return javaShellUtil.executeDataShell(shell);
        }
        return JaveShellUtil.ExecCommand(shell);
    }

    /**
     * Writes the shell outcome back onto the transfer-log record: end time,
     * SUCCESS/FAIL status, and the error message (if any).
     */
    private void updateDataTransferLog(String dataTransferLogPkId, Map resultMap) {
        DataTransferLogRequestDto dataTransferLogRequestDto = new DataTransferLogRequestDto();
        dataTransferLogRequestDto.setPkId(dataTransferLogPkId).setEndTime(DateUtil.now());
        dataTransferLogRequestDto.setStatus("0".equals(resultMap.get(JaveShellUtil.SHELL_RESULT)) ? "SUCCESS" : "FAIL");
        // Objects.toString(..., null) keeps the message null when absent instead of
        // persisting the literal string "null" (the String.valueOf(null) behaviour),
        // which previously polluted successful log records.
        dataTransferLogRequestDto.setErrorMessage(Objects.toString(resultMap.get(JaveShellUtil.SHELL_ERR_RESULT), null));
        dataTransferLogService.modifyById(dataTransferLogRequestDto);
    }

    /**
     * Generates the Flink SQL script for one business date and writes it to disk
     * under the configured save path.
     *
     * @return the path of the written SQL file
     */
    private String createDataTransferSqlFile(String bizDate, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        String sqlFilePath = sqlSavePath + dataTransferSqlStructureDto.getDataTransferSqlFilePath();
        // Map the source column types/lengths to their Flink equivalents first,
        // since the DDL builders read the (mutated) metadata.
        toSourceTableMetadataMapper(dataTransferSqlStructureDto);
        String sqlContent = dataTransferSqlStructure(bizDate, dataTransferSqlStructureDto);
        FileUtil.writeString(sqlContent, sqlFilePath, "UTF-8");
        return sqlFilePath;
    }

    /**
     * Assembles the complete Flink SQL script: header settings, source table DDL,
     * target table DDL, and the INSERT statement — each separated by a line feed.
     */
    private String dataTransferSqlStructure(String bizDate, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        String dataTransferSql =
                // 1. Flink script header (execution settings)
                buildHead(dataTransferSqlStructureDto.getTargetTableName()) + LINEFEED +
                        // 2. Flink CREATE TABLE for the source (subclass-specific)
                        buildCreateSourceTable(dataTransferSqlStructureDto) + LINEFEED +
                        // 3. Flink CREATE TABLE for the target (subclass-specific)
                        buildCreateTargetTable(dataTransferSqlStructureDto) + LINEFEED +
                        // 4. Flink INSERT ... SELECT statement
                        buildDataInsertSql(bizDate, dataTransferSqlStructureDto) + LINEFEED;
        // Let SLF4J substitute the placeholder itself rather than pre-formatting
        // with StrUtil.format — same output, idiomatic parameterized logging.
        log.info("执行FLINK SQL：\n{}", dataTransferSql);
        return dataTransferSql;
    }

    /**
     * Builds the Flink SQL client header: execution mode, result mode,
     * parallelism, pipeline name, verbosity, and checkpoint interval.
     * The Chinese comments are intentional — they end up in the generated file.
     *
     * @param pipelineName value for the 'pipeline.name' setting
     * @return the header block, terminated by a line feed
     */
    private String buildHead(String pipelineName) {
        // Empty join elements produce the blank line between each setting pair.
        return String.join(LINEFEED,
                "-- 执行模式: 'batch' 或 'streaming'",
                "SET 'execution.runtime-mode' = 'batch';",
                "",
                "-- 可用值: 'table'、'changelog' 和 'tableau'",
                "SET 'sql-client.execution.result-mode' = 'tableau';",
                "",
                "-- 设置Flink的并行度（默认为1）",
                "SET 'parallelism.default' = '1';",
                "",
                "-- 设置管道名称",
                StrUtil.format("SET 'pipeline.name' = '{}';", pipelineName),
                "",
                "-- 打印详细的日志",
                "SET 'sql-client.verbose' = 'true';",
                "",
                "-- 设置checkpoint参数",
                "SET 'execution.checkpointing.interval' = '60000ms';") + LINEFEED;
    }

    /**
     * Builds the Flink {@code INSERT INTO ... SELECT ... FROM ...} statement,
     * including the partition WHERE clause when a partition field is mapped.
     *
     * @param bizDate business date in yyyyMMdd form
     * @return the complete INSERT statement, ';'-terminated
     */
    public String buildDataInsertSql(String bizDate, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        String targetTable = FLINK_TARGET_PREFIX + dataTransferSqlStructureDto.getTargetTable() + LINEFEED;
        String sourceTable = FLINK_SOURCE_PREFIX + dataTransferSqlStructureDto.getSourceTable();
        return StrUtil.format("INSERT INTO {} SELECT {} FROM default_catalog.default_database.{} {};",
                targetTable,
                dataTransferSqlStructureDto.getSelectTableMetadata(bizDate),
                sourceTable,
                buildInsertWhere(bizDate, dataTransferSqlStructureDto));
    }

    /**
     * Builds the WHERE clause restricting the copy to one business date via the
     * first field flagged as a partition field; returns "" when none is mapped.
     * NOTE(review): bizDate is interpolated unquoted — assumes a numeric
     * yyyyMMdd partition column; confirm for string-typed partition fields.
     */
    public String buildInsertWhere(String bizDate, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        return dataTransferSqlStructureDto.getMetadatas().stream()
                .filter(field -> Objects.equals(field.getIsPartitionField(), "Y"))
                .findFirst()
                .map(field -> StrUtil.format(" where {} = {}", field.getSourceFieldName(), bizDate))
                .orElse("");
    }

    /**
     * Rewrites each source column's type (and scales its length by the rule's
     * coefficient) to the Flink equivalent, using the configured conversion
     * table. Columns without a matching rule are left untouched; columns with a
     * rule but no length (or a zero coefficient) get their length cleared.
     */
    private void toSourceTableMetadataMapper(DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        for (TableTransferFieldMapResponseDto metadata : dataTransferSqlStructureDto.getMetadatas()) {
            TableConvertResponseDto tableConvert = getTableConvert(dataTransferSqlStructureDto, metadata);
            if (tableConvert == null) {
                continue;
            }
            metadata.setSourceFieldType(tableConvert.getTargetValue());
            if (StrUtil.isNotBlank(metadata.getSourceFieldLength()) && tableConvert.getCoefficient() != 0) {
                String[] lengths = metadata.getSourceFieldLength().split(",");
                // Write the scaled value back into the array by index. The previous
                // enhanced-for loop only reassigned its loop variable, so the
                // coefficient was never actually applied to the joined result.
                for (int i = 0; i < lengths.length; i++) {
                    lengths[i] = String.valueOf(tableConvert.getCoefficient() * Integer.parseInt(lengths[i]));
                }
                metadata.setSourceFieldLength(StrUtil.join(",", lengths));
            } else {
                metadata.setSourceFieldLength(null);
            }
        }
    }

    /**
     * Looks up the source-type -> FLINK field-type conversion rule for one column.
     *
     * @return the first matching rule, or {@code null} when none is configured
     */
    private TableConvertResponseDto getTableConvert(DataTransferSqlStructureDto dataTransferSqlStructureDto, TableTransferFieldMapResponseDto metadata) {
        TableConvertRequestDto query = new TableConvertRequestDto()
                .setTargetServer("FLINK")
                .setOriginalServer(dataTransferSqlStructureDto.getSourceDatabaseType())
                .setConvertType("fieldType")
                .setOriginalValue(metadata.getSourceFieldType());
        List<TableConvertResponseDto> matches = tableConvertService.queryList(query);
        if (CollectionUtil.isNotEmpty(matches)) {
            return matches.get(0);
        }
        return null;
    }

    /**
     * Delegates to the source-info service to refresh Impala metadata for the
     * target connection/table, so freshly written data becomes visible.
     */
    protected void refreshImpalaMetadata(DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        sourceInfoService.refreshImpalaMetadata(dataTransferSqlStructureDto.getTargetConnPkId(), dataTransferSqlStructureDto.getTargetTableName());
    }

    /**
     * Builds the Flink CREATE TABLE statement for the source table.
     * Implemented per source-database type by concrete subclasses.
     *
     * @param dataTransferSqlStructureDto transfer definition with mapped metadata
     * @return the source-side CREATE TABLE DDL
     */
    protected abstract String buildCreateSourceTable(DataTransferSqlStructureDto dataTransferSqlStructureDto);

    /**
     * Builds the Flink CREATE TABLE statement for the target table.
     * Implemented per target-database type by concrete subclasses.
     *
     * @param dataTransferSqlStructureDto transfer definition with mapped metadata
     * @return the target-side CREATE TABLE DDL
     */
    protected abstract String buildCreateTargetTable(DataTransferSqlStructureDto dataTransferSqlStructureDto);
}
