package com.hexinfo.dmpro.component.data.transfer.service.impl;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.RuntimeUtil;
import cn.hutool.core.util.StrUtil;
import com.hexinfo.dmpro.component.data.transfer.model.DataExport;
import com.hexinfo.dmpro.component.data.transfer.model.DataImport;
import com.hexinfo.dmpro.component.data.transfer.service.DataImportService;
import com.hexinfo.dmpro.component.data.transfer.utils.ShellUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Map;

/**
 * @Package com.hexinfo.dmpro.component.data.transfer.service.impl
 * @ClassName DataImportServiceImpl
 * @Description 数据导入-接口实现
 * @Author Wang zhihao
 * @Date 2023/9/7 16:54
 * @Version v1.0
 **/
@Slf4j
@Service
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class DataImportServiceImpl implements DataImportService {

    /**
     * @param dataImport
     * @Method dataImportFromLocal
     * @Param
     * @Return boolean
     * @Description 数据从本地导入到Hive
     * @Author Wang zhihao
     * @Date 2023/9/7 17:00
     * @Version V1.0
     */
    @Override
    public void dataImportFromLocal(DataImport dataImport) {
        try {
            doCheck(dataImport);
            doImport(dataImport);
        } catch (Exception e) {
            log.error("[HIVE数据导入]TXT文本数据导入到表[{}]异常: {}", dataImport.getTableFullName(), e.getMessage(), e);
            throw new RuntimeException(StrUtil.format("TXT文本数据导入到表[{}]异常: {}", dataImport.getTableFullName(), e.getMessage()));
        } finally {
            // 删除临时文件和临时表
        }
    }

    /**
     * @param dataImport
     * @Method doCheck
     * @Param
     * @Return void
     * @Description 导入前检查
     * @Author Wang zhihao
     * @Date 2023/9/12 9:59
     * @Version V1.0
     */
    private void doCheck(DataImport dataImport) {
        try {
            // 检查local文件是否存在
            isLocalFileExist(dataImport);

            // 校验Hive表是否存在
            isHiveTableExist(dataImport);

            // 创建HDFS目录
            hdfsMkdir(dataImport.getHdfsDir());

            // 授权新创建的HDFS目录给hdfs用户
            hdfsDirChown(dataImport.getHdfsDir());

            // 创建HIVE TXT文本表
            createHiveTxTTable(dataImport);

            // 创建HIVE Parquet表
            createHiveParquetTable(dataImport);

        } catch (Exception e) {
            log.error("[HIVE数据导入]表[{}]前置处理异常: {}", e.getMessage(), e);
            throw new RuntimeException("[导入前检查]发生异常: " + e.getMessage());
        }
    }

    private void createHiveTxTTable(DataImport dataImport) {
        try {
            runHiveShell(dataImport, dataImport.getTxtJdbcUrls(), dataImport.getCreateTxtTableSql());
        } catch (Exception e) {
            log.error("[HIVE数据导入]表[{}]创建文本表异常: {}", e.getMessage(), e);
            throw new RuntimeException("创建文本表异常: " + e.getMessage());
        }
    }

    private void hdfsMkdir(String hdfsDir) {
        if (!existHdfsDir(hdfsDir)) {
            String shell = StrUtil.format("hadoop fs -mkdir -p {}", hdfsDir);
            Map result = ShellUtil.exec(shell);
            if (!result.containsKey("INVOKE_STATUS") || !(boolean) result.get("INVOKE_STATUS")) {
                throw new RuntimeException(StrUtil.format("创建HDFS目录发生异常: {}", result.get("SHELL_ERR_RESULT")));
            }
        }
    }

    private boolean existHdfsDir(String hdfsDir) {
        String shell = StrUtil.format("hadoop fs -test -e {}", hdfsDir);
        Map result = ShellUtil.exec(shell);
        return result.containsKey("INVOKE_STATUS") && (boolean) result.get("INVOKE_STATUS");
    }

    private void hdfsDirChown(String hdfsDir) {
        String shell = StrUtil.format("sudo -u hdfs hadoop fs -chown -R hive:hive {}", hdfsDir);
        Map result = ShellUtil.exec(shell);
        if (!result.containsKey("INVOKE_STATUS") || !(boolean) result.get("INVOKE_STATUS")) {
            throw new RuntimeException("HDFS目录授权发生异常: " + result.get("SHELL_ERR_RESULT"));
        }
    }

    private void hdfsHdfsFileChown(String hdfsHdfsFilePath) {
        String shell = StrUtil.format("sudo -u hdfs hadoop fs -chmod -R 777 {}", hdfsHdfsFilePath);
        Map result = ShellUtil.exec(shell);
        if (!result.containsKey("INVOKE_STATUS") || !(boolean) result.get("INVOKE_STATUS")) {
            throw new RuntimeException("HDFS数据文件授权异常: " + result.get("SHELL_ERR_RESULT"));
        }
    }

    /**
     * @Method doImport
     * @Param
     * @Param dataImport
     * @Return void
     * @Description 导入
     * @Author Wang zhihao
     * @Date 2023/9/12 9:48
     * @Version V1.0
     */
    private void doImport(DataImport dataImport) {
        try {
            // 将本地文件传到HDFS上
            localImportToHdfs(dataImport);

            // 数据文件授权
            hdfsHdfsFileChown(dataImport.getHdfsDir() + "/" + dataImport.getHdfsFileName());

            // 数据文件从HDFS导入到Hive TXT文本表中
            hdfsToHiveTxtTable(dataImport);

            // 数据从TXT文本表中装载到HIVE目标表中
            hiveTxtTableToHiveParquetTable(dataImport);

        } catch (Exception e) {
            log.error("【数据导入】-[导入]发生异常: {}", e.getMessage(), e);
            throw new RuntimeException("【数据导入】-[导入]发生异常: " + e.getMessage());
        } finally {
            removeTempHdfsPath(dataImport);
            dropTxtTable(dataImport);
        }
    }

    private void dropTxtTable(DataImport dataImport) {
        if ("Y".equals(dataImport.getIsCreateParquetTable())) {
            try {
                String tmplate = "DROP TABLE IF EXISTS {};";
                String sql = StrUtil.format(tmplate, dataImport.getTxtTableFullName());
                runHiveShell(dataImport, dataImport.getParquetJdbcUrls(), sql);
            } catch (Exception e) {
                log.error("[HIVE数据导入]表[{}]TXT文本表删除异常: {}", e.getMessage(), e);
            }
        }
    }

    public void removeTempHdfsPath(DataImport dataImport) {
        String shell = StrUtil.format("sudo -u hdfs hadoop fs -rm -r -f {}", dataImport.getHdfsDir());
        ShellUtil.exec(shell);
    }

    /**
     * @param dataImport
     * @Method isLocalFileExist
     * @Param
     * @Return void
     * @Description 检查local文件是否存在
     * @Author Wang zhihao
     * @Date 2023/9/7 17:10
     * @Version V1.0
     */
    public void isLocalFileExist(DataImport dataImport) {
        if (!FileUtil.exist(dataImport.getLocalPath())) {
            throw new RuntimeException("本地数据文件不存在，请检查文件");
        }
    }

    /**
     * @Method localImportToHdfs
     * @Param
     * @Return void
     * @Description 本地文件导入到HDFS中
     * @Author Wang zhihao
     * @Date 2023/9/7 17:14
     * @Version V1.0
     */
    public void localImportToHdfs(DataImport dataImport) throws Exception {
        String shell = StrUtil.format("sudo -u hive hadoop fs -put -f {} {}", dataImport.getLocalPath(), dataImport.getHdfsDir());
        Map result = ShellUtil.exec(shell);
        if (!result.containsKey("INVOKE_STATUS") || !(boolean) result.get("INVOKE_STATUS")) {
            throw new RuntimeException("本地数据文件上传到HDFS异常: " + result.get("SHELL_ERR_RESULT"));
        }
    }

    /**
     * @Method isHiveTableExist
     * @Param dataImport
     * @Return void
     * @Description 校验hive表是否存在
     * @Author Wang zhihao
     * @Date 2023/9/7 18:09
     * @Version V1.0
     */
    public void isHiveTableExist(DataImport dataImport) {
        String querySql = StrUtil.format("DESCRIBE {}", dataImport.getTableFullName());
        System.out.println("querySql:" + querySql);
        // 若表不存在，但设定自动建表，执行建表操作
//        if (!runHiveShellWithStatus(querySql).equals(0) && dataImport.getIsAutoCreateTable().equals("Y")) {
//            createHiveTable(dataImport);
//        // 若表不存在，但并未设定自动建表，抛出异常
//        } else if (!runHiveShellWithStatus(querySql).equals(0) && dataImport.getIsAutoCreateTable().equals("N")){
//            throw new RuntimeException("检测到Hive表为空");
//        }
    }

    /*
     * @Method: createHiveParquetTable <br>
     * @Param: [dataImport] <br>
     * @Return: void <br>
     * @Description：创建HIVE Parquet表<br>
     * @Author： wz.li<br>
     * @Date： 2023/10/23 10:36 <br>
     * @Version： V2.0.2<br>
     */
    private void createHiveParquetTable(DataImport dataImport) {
        if ("Y".equals(dataImport.getIsCreateParquetTable())) {
            try {
                runHiveShell(dataImport, dataImport.getTxtJdbcUrls(), dataImport.getCreateTableSql());
            } catch (Exception e) {
                throw new RuntimeException("创建HIVE Parquet表失败，失败原因: " + e.getMessage(), e);
            }
        }
    }

    /**
     * @param dataImport
     * @Method hdfsToHive
     * @Param
     * @Return void
     * @Description 数据文件导入到Hive表中
     * @Author Wang zhihao
     * @Date 2023/9/7 18:33
     * @Version V1.0
     */
    public void hdfsToHiveTxtTable(DataImport dataImport) {
        try {
            String tmplate = "LOAD DATA INPATH '{}' {} INTO TABLE {} {};";
            String sql = StrUtil.format(tmplate, dataImport.getHdfsPath(), dataImport.getIsOverwrite(), dataImport.getTxtTableFullName(), getTablePartition(dataImport));
            runHiveShell(dataImport, dataImport.getTxtJdbcUrls(), sql);
        } catch (Exception e) {
            log.error("[HIVE数据导入]表[{}]从HDFS加载到HIVE文本表中异常: {}", e.getMessage(), e);
            throw new RuntimeException("从HDFS加载到HIVE文本表中异常: " + e.getMessage());
        }
    }

    private void hiveTxtTableToHiveParquetTable(DataImport dataImport) {
        if ("Y".equals(dataImport.getIsInsertParquetTable())) {
            try {
                StringBuilder sql = new StringBuilder();
                sql.append("set hive.exec.dynamic.partition=true;");
                sql.append("set hive.exec.dynamic.partition.mode=nonstrict;");
                sql.append(dataImport.getInsertParquetTableSql());
                runHiveShell(dataImport, dataImport.getParquetJdbcUrls(), sql.toString());
            } catch (Exception e) {
                log.error("[HIVE数据导入]表[{}]TXT文本表加载到目标表异常: {}", e.getMessage(), e);
                throw new RuntimeException("TXT文本表加载到目标表异常: " + e.getMessage());
            }
        }
    }

    private String getTablePartition(DataImport dataImport) {
        return StrUtil.isEmptyOrUndefined(dataImport.getPartitionName()) ? "" : StrUtil.format(" PARTITION({})", dataImport.getPartitionConfiguration());
    }

    /**
     * @param sql
     * @Method runHiveShell
     * @Param
     * @Return void
     * @Description 执行Hive的shell命令
     * @Author Wang zhihao
     * @Date 2023/9/12 20:13
     * @Version V1.0
     */
    private void runHiveShell(DataImport dataImport, String jdbcUrl, String sql) throws Exception {
        Process process = RuntimeUtil.exec(getSheelCmd(dataImport, jdbcUrl, sql));

        // 获取命令执行的输出结果
        InputStream inputStream = process.getInputStream();
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        String line;
        while ((line = reader.readLine()) != null) {
            log.info(line);
        }
        int exitCode = process.waitFor();
        if (exitCode != 0) {
            throw new RuntimeException("执行失败，执行返回码为: " + exitCode);
        }
    }

    private String[] getSheelCmd(DataImport dataImport, String jdbcUrl, String sql) {
        if (StrUtil.isNotBlank(dataImport.getHiveUserName()) && StrUtil.isNotBlank(dataImport.getHivePassword())) {
            String[] cmd = {"beeline", "-u", jdbcUrl, "-n", dataImport.getHiveUserName(), "-p", dataImport.getHivePassword(), "-e", sql};
            return cmd;
        } else {
            String[] cmd = {"beeline", "-n", "hdfs", "--verbose=true", "-u", jdbcUrl, "-e", sql};
            return cmd;
        }
    }
}
