package com.hex.ds.hdtp.core.app.data.transfer.handler.impl;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.StrUtil;
import com.hex.ds.hdtp.core.app.common.util.JaveShellUtil;
import com.hex.ds.hdtp.core.app.data.contrast.service.IContrastTableStructureService;
import com.hex.ds.hdtp.core.app.data.transfer.converter.IDataTransferLogConverter;
import com.hex.ds.hdtp.core.app.data.transfer.converter.IDataTransferSqlStructureConverter;
import com.hex.ds.hdtp.core.app.data.transfer.dto.DataTransferSqlStructureDto;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferListService;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferLogService;
import com.hex.ds.hdtp.core.app.data.transfer.service.IDataTransferTaskService;
import com.hex.ds.hdtp.core.app.metadata.service.ISourceConfInfoService;
import com.hex.ds.hdtp.core.app.metadata.service.ISourceInfoService;
import com.hex.ds.hdtp.core.inf.metadata.po.SourceConfTemplatePo;
import com.hex.ds.hdtp.core.inf.metadata.repository.impl.SourceConfInfoRepository;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import lombok.extern.slf4j.Slf4j;
import org.postgresql.core.BaseConnection;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

@Slf4j
@Service("HIVE_HIVE_FILE")
public class HiveHiveTransferHandlerFileService extends AbstractDataTransferHandlerFileService  {

    /**
     * Platform-local staging directory where exported data files are pulled to
     * before being pushed to the target server. Used as a directory *prefix*
     * (e.g. {@code localHdfsPath + "<table>.txt"}), so it must denote a directory.
     */
    @Value("${data.transfer.hdfs-save-path}")
    public String localHdfsPath;

    /** Config key: staging directory on the remote server for export/import data files. */
    private static final String LOCALHDFSEXPORTPATH = "localHdfsPath";

    /** Result-map key: overall shell status; "0" means success. */
    private static final String SHELL_RESULT = "SHELL_RESULT";

    /** Result-map key: row-count / summary of the finished transfer. */
    private static final String ANALYSIS_RESULT = "ANALYSIS_RESULT";

    /** Result-map key: error (or success) message recorded in the transfer log. */
    private static final String SHELL_ERR_RESULT = "SHELL_ERR_RESULT";

    /** File name for the Kerberos keytab uploaded to the remote staging directory. */
    private static final String HIVE_KEYTAB = "hive.keytab";

    public HiveHiveTransferHandlerFileService(ISourceInfoService sourceInfoService, ISourceConfInfoService sourceConfInfoService, IDataTransferLogService dataTransferLogService, IDataTransferListService dataTransferListService, IDataTransferTaskService dataTransferTaskService, IDataTransferLogConverter dataTransferLogConverter, IDataTransferSqlStructureConverter dataTransferSqlStructureConverter, IContrastTableStructureService contrastTableStructureService, JaveShellUtil javeShellUtil, SourceConfInfoRepository sourceConfInfoRepository) {
        super(sourceInfoService, sourceConfInfoService, dataTransferLogService, dataTransferListService, dataTransferTaskService, dataTransferLogConverter, dataTransferSqlStructureConverter, contrastTableStructureService, javeShellUtil, sourceConfInfoRepository);
    }

    /**
     * Executes a file-based Hive-to-Hive data transfer: export from the source
     * cluster to a local file, then import that file into the target cluster.
     *
     * @param dataTransferLogId          id of the transfer-log row to update with the outcome
     * @param bizDate                    business date of this run (currently unused here;
     *                                   partition dates come from the DTO's bizDates list)
     * @param dataTransferSqlStructureDto source/target connection config and table metadata
     * @return {@code true} when the whole pipeline succeeded
     */
    protected boolean doDataFileTransfer(String dataTransferLogId, String bizDate, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        Map<String, String> resultMap = new HashMap<>();
        Map<String, String> sourceConfMap = dataTransferSqlStructureDto.getSourceConfList()
                .stream().collect(Collectors.toMap(SourceConfTemplatePo::getConfName, conf -> conf.getConfValue() != null ? conf.getConfValue() : ""));
        Map<String, String> targetConfMap = dataTransferSqlStructureDto.getTargetConfList()
                .stream().collect(Collectors.toMap(SourceConfTemplatePo::getConfName, conf -> conf.getConfValue() != null ? conf.getConfValue() : ""));
        Session session = null;
        Session targetSession = null;
        try {
            session = javeShellUtil.getConnectSession(sourceConfMap.get("serverUsername"), sourceConfMap.get("hiveIp"), sourceConfMap.get("serverUserPwd"), 22);
            targetSession = javeShellUtil.getConnectSession(targetConfMap.get("serverUsername"), targetConfMap.get("hiveIp"), targetConfMap.get("serverUserPwd"), 22);
            // Pre-export checks (staging dirs, source HDFS path, Kerberos).
            doExportCheck(session, sourceConfMap);
            // Export: Hive -> HDFS -> source server -> platform-local file.
            doExport(session, sourceConfMap, dataTransferSqlStructureDto);
            // Pre-import checks (local file, staging dir, target table, Kerberos).
            doImportCheck(targetSession, targetConfMap, dataTransferSqlStructureDto);
            // Import: platform-local file -> target server -> LOAD DATA into Hive.
            doImport(targetSession, targetConfMap, dataTransferSqlStructureDto);
            resultMap.put(SHELL_RESULT, "0");
            resultMap.put(SHELL_ERR_RESULT, "迁移成功");
            // String.valueOf keeps the typed map safe whatever getResultCount returns.
            resultMap.put(ANALYSIS_RESULT, String.valueOf(getResultCount(dataTransferSqlStructureDto)));
        } catch (Exception e) {
            log.error("【{}】数据同步失败: {}", dataTransferSqlStructureDto.getDataTransferSqlFileName(), e.getMessage(), e);
            resultMap.put(SHELL_RESULT, "1");
            resultMap.put(ANALYSIS_RESULT, "迁移失败,无法汇总");
            if (resultMap.get(SHELL_ERR_RESULT) == null) {
                resultMap.put(SHELL_ERR_RESULT, e.getMessage());
            }
        } finally {
            // JSch sessions are not AutoCloseable; disconnect explicitly so SSH
            // connections are not leaked on either success or failure.
            disconnectQuietly(session);
            disconnectQuietly(targetSession);
            updateDataTransferLog(dataTransferLogId, resultMap);
        }
        // Read back the same key this method wrote. The original compared against
        // JaveShellUtil.SHELL_RESULT, which would always fail if that constant
        // ever diverged from the local key.
        return "0".equals(resultMap.get(SHELL_RESULT));
    }

    /**
     * Disconnects an SSH session if it was opened; never throws, because cleanup
     * must not mask the transfer outcome.
     */
    private static void disconnectQuietly(Session session) {
        if (session != null) {
            try {
                session.disconnect();
            } catch (Exception ignored) {
                // best-effort cleanup; the transfer result is already decided
            }
        }
    }

    /**
     * Pre-export checks: ensures the platform-local staging directory and the
     * remote staging directory exist, verifies the source HDFS path, and runs
     * {@code kinit} when Kerberos auth is configured.
     *
     * @param session       SSH session to the source server
     * @param sourceConfMap source connection configuration (confName -> confValue)
     * @throws RuntimeException wrapping any failure, with the cause preserved
     */
    private void doExportCheck(Session session, Map<String, String> sourceConfMap) {
        try {
            // localHdfsPath is used as a directory prefix elsewhere, so create a
            // directory here. NOTE(review): the original called FileUtil.touch(),
            // which creates a *file* at this path — confirm no caller relies on that.
            if (!FileUtil.exist(localHdfsPath)) {
                FileUtil.mkdir(localHdfsPath);
            }
            // Ensure the remote staging directory exists.
            String localHdfsExportPath = sourceConfMap.get(LOCALHDFSEXPORTPATH);
            javeShellUtil.executeCommandReturn("if [ ! -d " + localHdfsExportPath + " ]; then mkdir -p " + localHdfsExportPath + "; fi", session);
            // Probe the source HDFS path; the command echoes a fixed marker string.
            String checkHdfsPath = javeShellUtil.executeCommandReturn(StrUtil.format("hdfs dfs -test -e {} && echo \"源HDFS路径存在\" || echo \"源HDFS路径不存在\"", sourceConfMap.get("hdfsPath")), session);
            // contains() rather than equals(): shell output normally carries a
            // trailing newline, which made the original equality never match.
            if (checkHdfsPath != null && checkHdfsPath.contains("源HDFS路径不存在")) {
                throw new RuntimeException("【数据导出】检查到源HDFS路径不存在，请检查是否创建");
            }
            // Kerberos: upload the keytab and authenticate. Literal-first compare
            // avoids an NPE when "authType" is absent from the config.
            if ("KERBEROS".equalsIgnoreCase(sourceConfMap.get("authType"))) {
                javeShellUtil.putFileToConnect(session, sourceConfMap.get("keyTabFilePath"), sourceConfMap.get(LOCALHDFSEXPORTPATH) + HIVE_KEYTAB);
                String kerberosSQL = StrUtil.format("kinit -k -t {} {}", sourceConfMap.get(LOCALHDFSEXPORTPATH) + HIVE_KEYTAB, sourceConfMap.get("principalUser"));
                javeShellUtil.executeCommandReturn(kerberosSQL, session);
            }
        } catch (Exception e) {
            throw new RuntimeException("【数据导出】-[导出前检查]发生异常:" + e.getMessage(), e);
        }
    }

    /**
     * Exports the source table: INSERT OVERWRITE DIRECTORY into HDFS, getmerge
     * the HDFS files onto the source server, then pull the merged file to the
     * platform over SFTP. Temporary HDFS/server files are removed in all cases.
     * <p>
     * NOTE(review): table names and separators are concatenated into shell/HiveQL
     * commands; values must come from trusted configuration only.
     *
     * @param session                    SSH session to the source server
     * @param sourceConfMap              source connection configuration
     * @param dataTransferSqlStructureDto table metadata for this transfer
     */
    private void doExport(Session session, Map<String, String> sourceConfMap, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        String hdfsExportPath = StrUtil.format("{}/{}", sourceConfMap.get("hdfsPath"), dataTransferSqlStructureDto.getSourceTable());
        String localHdfsExportPath = sourceConfMap.get(LOCALHDFSEXPORTPATH) + dataTransferSqlStructureDto.getSourceTable() + ".txt";
        String separator = sourceConfMap.get("hiveSeparator");
        try {
            String insertSql = StrUtil.format("INSERT OVERWRITE DIRECTORY '{}' ROW FORMAT DELIMITED ", hdfsExportPath);
            // Only emit the FIELDS TERMINATED BY clause when a separator is configured.
            String formatSql = StrUtil.isEmptyOrUndefined(separator) ? ""
                    : StrUtil.format("FIELDS TERMINATED BY '{}' ", separator);
            String querySql = StrUtil.format("select * from {};", dataTransferSqlStructureDto.getSourceTableName());
            runHiveShell(insertSql + formatSql + querySql, session);
            // Merge the exported HDFS part-files onto the source server, then pull
            // the merged file to the platform-local staging directory over SFTP.
            javeShellUtil.executeCommandReturn("hadoop fs -getmerge " + hdfsExportPath + " " + localHdfsExportPath, session);
            javeShellUtil.pullFileFromConnect(session, localHdfsExportPath, localHdfsPath + dataTransferSqlStructureDto.getSourceTable() + ".txt");
        } catch (Exception e) {
            throw new RuntimeException("【数据导出】-[导出]发生异常:" + e.getMessage(), e);
        } finally {
            removeTempHdfsFile(session, hdfsExportPath, localHdfsExportPath);
        }
    }

    /**
     * Pre-import checks: verifies the platform-local data file exists, ensures
     * the remote staging directory exists, confirms the Hive target table is
     * present, and runs {@code kinit} when Kerberos auth is configured.
     *
     * @param targetSession              SSH session to the target server
     * @param targetConfMap              target connection configuration
     * @param dataTransferSqlStructureDto table metadata for this transfer
     */
    private void doImportCheck(Session targetSession, Map<String, String> targetConfMap, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        try {
            // The export step must have produced this file locally.
            if (!FileUtil.exist(localHdfsPath + dataTransferSqlStructureDto.getSourceTable() + ".txt")) {
                throw new RuntimeException("【数据导入】检查到本地文件不存在，请检查文件");
            }
            // Ensure the remote staging directory exists.
            String localHdfsImportPath = targetConfMap.get(LOCALHDFSEXPORTPATH);
            javeShellUtil.executeCommandReturn("if [ ! -d " + localHdfsImportPath + " ]; then mkdir -p " + localHdfsImportPath + "; fi", targetSession);
            // DESCRIBE fails with a SemanticException when the table is missing.
            String checkTableExist = javeShellUtil.executeCommandReturn("hive -e 'DESCRIBE " + dataTransferSqlStructureDto.getTargetTableName() + "'", targetSession);
            if (checkTableExist != null && checkTableExist.contains("FAILED: SemanticException")) {
                throw new RuntimeException("【数据导入】检查到Hive目标表不存在，请检查是否建表");
            }
            // Kerberos authentication on the target side, mirroring doExportCheck.
            if ("KERBEROS".equalsIgnoreCase(targetConfMap.get("authType"))) {
                javeShellUtil.putFileToConnect(targetSession, targetConfMap.get("keyTabFilePath"), targetConfMap.get(LOCALHDFSEXPORTPATH) + HIVE_KEYTAB);
                String kerberosSQL = StrUtil.format("kinit -k -t {} {}", targetConfMap.get(LOCALHDFSEXPORTPATH) + HIVE_KEYTAB, targetConfMap.get("principalUser"));
                javeShellUtil.executeCommandReturn(kerberosSQL, targetSession);
            }
        } catch (Exception e) {
            throw new RuntimeException("【数据导入】-[导入前检查]发生异常: " + e.getMessage(), e);
        }
    }

    /**
     * Imports the data file: pushes the platform-local file to the target server
     * over SFTP, then runs {@code LOAD DATA LOCAL INPATH} into the target table
     * (optionally OVERWRITE, optionally into a date partition).
     *
     * @param targetSession              SSH session to the target server
     * @param targetConfMap              target connection configuration
     * @param dataTransferSqlStructureDto table metadata for this transfer
     */
    private void doImport(Session targetSession, Map<String, String> targetConfMap, DataTransferSqlStructureDto dataTransferSqlStructureDto) {
        String localHdfsImportPath = targetConfMap.get(LOCALHDFSEXPORTPATH) + dataTransferSqlStructureDto.getTargetTable() + ".txt";
        String partitionName = Optional.ofNullable(dataTransferSqlStructureDto.getPartitionFieldNameList()).map(CollUtil::getFirst).orElse(null);
        // First biz date, digits only, truncated to yyyyMMdd. Assumes the value
        // carries at least 8 digits — TODO confirm upstream validation.
        String bizDates = dataTransferSqlStructureDto.getBizDates().get(0).toString().replaceAll("[^0-9]", "").substring(0, 8);
        // "TOTAL" means full refresh -> OVERWRITE; literal-first compare avoids an
        // NPE (a missing "isOverwrite" key now falls back to append mode).
        String isOverwrite = "TOTAL".equalsIgnoreCase(targetConfMap.get("isOverwrite")) ? "OVERWRITE" : "";
        try {
            // Push the local data file to the target server, then load it into Hive.
            javeShellUtil.putFileToConnect(targetSession, localHdfsPath + dataTransferSqlStructureDto.getSourceTable() + ".txt", localHdfsImportPath);
            String loadSql = StrUtil.format("LOAD DATA LOCAL INPATH '{}' ", localHdfsImportPath);
            String insertSql = StrUtil.format("{} INTO TABLE {} ", isOverwrite, dataTransferSqlStructureDto.getTargetTableName());
            String partitionSql = StrUtil.isEmptyOrUndefined(partitionName) ? "" : StrUtil.format("PARTITION ({}='{}')", partitionName ,bizDates);
            runHiveShell(loadSql + insertSql + partitionSql, targetSession);
        } catch (Exception e) {
            throw new RuntimeException("【数据导入】-[导入]发生异常: " + e.getMessage(), e);
        }
    }

    /**
     * Removes the temporary files produced during export: the HDFS export
     * directory and the merged data file on the remote server.
     *
     * @param session            SSH session to the source server
     * @param hdfsExportPath     HDFS directory written by INSERT OVERWRITE DIRECTORY
     * @param remoteMergedFile   merged data file on the remote server's local disk
     */
    public void removeTempHdfsFile(Session session, String hdfsExportPath, String remoteMergedFile) {
        try {
            javeShellUtil.executeCommandReturn("hdfs dfs -rm -r " + hdfsExportPath, session);
            javeShellUtil.executeCommandReturn("rm -rf " + remoteMergedFile, session);
        } catch (Exception e) {
            throw new RuntimeException("删除HDFS路径" + hdfsExportPath + "发生异常: " + e.getMessage(), e);
        }
    }

    /**
     * Runs a HiveQL statement on the remote host via {@code hive -e "..."}.
     * The statement is embedded in double quotes, so it must not itself contain
     * unescaped double quotes; callers pass trusted, internally-built SQL only.
     *
     * @param sql     HiveQL to execute
     * @param session SSH session to run the command on
     */
    private void runHiveShell(String sql, Session session) throws JSchException, IOException {
        javeShellUtil.executeCommandReturn("hive -e " + "\"" + sql + "\"", session);
    }

    /**
     * Not applicable for the Hive-to-Hive file handler: data moves over
     * SSH/SFTP + LOAD DATA, never through a PostgreSQL COPY, so this hook is a
     * deliberate no-op.
     */
    @Override
    protected void copyFromFile(BaseConnection baseConn, String tempFilePath, String targetTableName, String delimiter, String targetEncoding, Map<String, String> resultMap) {
        // Intentionally empty: file-based Hive transfers do not use COPY.
    }

}
