package com.hexinfo.dmpro.component.data.transfer.service.impl;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.RuntimeUtil;
import cn.hutool.core.util.StrUtil;
import com.hexinfo.dmpro.component.data.transfer.model.DataExport;
import com.hexinfo.dmpro.component.data.transfer.service.DataExportService;
import com.hexinfo.dmpro.component.data.transfer.utils.ShellUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.Map;

/**
 * Data export service implementation: exports a Hive query result to a local text
 * file by first staging it in an HDFS directory, then merging the HDFS part files
 * down to the local filesystem.
 *
 * @author Wang zhihao
 * @since 2023/9/7
 */
@Slf4j
@Service
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class DataExportServiceImpl implements DataExportService {

    /**
     * Exports Hive data to the local filesystem as a TXT file.
     *
     * @param dataExport export parameters (table name, query SQL, HDFS staging dir, local path, ...)
     * @throws RuntimeException if the pre-check or the export itself fails
     */
    @Override
    public void dataExportToLocal(DataExport dataExport) {
        try {
            doCheck(dataExport);
            doExport(dataExport);
        } catch (Exception e) {
            log.error("[HIVE数据导出]表[{}]导出成TXT文本异常: {}", dataExport.getTableFullName(), e.getMessage(), e);
            // Pass `e` as the cause so callers keep the full stack trace.
            throw new RuntimeException(StrUtil.format("[HIVE数据导出]表[{}]导出成TXT文本异常: {}", dataExport.getTableFullName(), e.getMessage()), e);
        }
    }

    /**
     * Pre-export checks. Currently only ensures the local target file exists.
     *
     * @param dataExport export parameters
     * @throws RuntimeException if any check fails
     */
    private void doCheck(DataExport dataExport) {
        try {
            // TODO(review): the Hive-table-existence check (isHiveTableExist) was commented
            // out here; confirm whether it should be restored or removed for good.
            checkLocalFileExist(dataExport);
        } catch (Exception e) {
            log.error("[HIVE数据导出]表[{}]导出成TXT文本,前置检查异常: {}", dataExport.getTableFullName(), e.getMessage(), e);
            // Preserve the cause instead of flattening it to a message string.
            throw new RuntimeException(StrUtil.format("前置检查异常: {}", e.getMessage()), e);
        }
    }

    /**
     * Runs the two-step export: Hive -> HDFS staging dir, then HDFS -> local file.
     * The temporary HDFS directory is cleaned up even when the export fails.
     *
     * @param dataExport export parameters
     * @throws RuntimeException if either step fails
     */
    private void doExport(DataExport dataExport) {
        try {
            // Step 1: dump the Hive query result into the temporary HDFS directory.
            hiveExportToHdfs(dataExport);
            // Step 2: merge the HDFS part files into the single local target file.
            hdfsMergeToLocalByShell(dataExport);
        } catch (Exception e) {
            log.error("[HIVE数据导出]表[{}]导出成TXT文本异常: {}", dataExport.getTableFullName(), e.getMessage(), e);
            // BUGFIX: the format string was missing its "{}" placeholder, so the underlying
            // error message was silently dropped from the thrown exception. Cause is now chained.
            throw new RuntimeException(StrUtil.format("HIVE数据表导出成TXT文本异常: {}", e.getMessage()), e);
        } finally {
            // Always remove the HDFS staging directory, even on failure.
            removeTempHdfsPath(dataExport);
        }
    }

    /**
     * Exports the Hive query result into the configured HDFS directory by running an
     * {@code INSERT OVERWRITE DIRECTORY} statement through beeline.
     *
     * @param dataExport export parameters (HDFS dir, field separator, query SQL, JDBC URL)
     * @throws RuntimeException if the beeline invocation fails
     */
    public void hiveExportToHdfs(DataExport dataExport) {
        try {
            // NOTE(review): hdfsDir/separator/hiveQuerySql are interpolated directly into the
            // HiveQL statement; these must come from trusted configuration, not end-user input.
            String template = "INSERT OVERWRITE DIRECTORY '{}' ROW FORMAT DELIMITED FIELDS TERMINATED BY '{}' {};";
            String sql = StrUtil.format(template, dataExport.getHdfsDir(), dataExport.getSeparator(), dataExport.getHiveQuerySql());
            ShellUtil.exec(getShellCmd(dataExport, dataExport.getHiveJdbcUrls(), sql));
        } catch (Exception e) {
            log.error("[HIVE数据导出]表[{}]导出数据文件到HDFS异常: {}", dataExport.getTableFullName(), e.getMessage(), e);
            // Chain the cause so the beeline failure details are not lost.
            throw new RuntimeException("从Hive导出数据文件到HDFS异常: " + e.getMessage(), e);
        }
    }

    /**
     * Ensures the local target file exists when download-to-local is enabled ("Y"),
     * creating it (and any missing parent directories) if necessary.
     *
     * @param dataExport export parameters (download flag, local path)
     */
    public void checkLocalFileExist(DataExport dataExport) {
        if ("Y".equals(dataExport.getIsDownloadToLocal()) && !FileUtil.exist(dataExport.getLocalPath())) {
            FileUtil.touch(dataExport.getLocalPath());
        }
    }

    /**
     * Merges all files under the HDFS staging directory into the single local target
     * file via {@code hadoop fs -getmerge}.
     *
     * @param dataExport export parameters (HDFS dir, local path)
     * @throws RuntimeException if the shell invocation reports failure
     */
    public void hdfsMergeToLocalByShell(DataExport dataExport) {
        // NOTE(review): hdfsDir/localPath are concatenated into a shell command line;
        // paths containing spaces or shell metacharacters would break or be unsafe — confirm
        // they are validated upstream.
        String shell = StrUtil.format("hadoop fs -getmerge {} {}", dataExport.getHdfsDir(), dataExport.getLocalPath());
        // Raw Map kept to match ShellUtil.exec's declared return type (not visible here).
        Map result = ShellUtil.exec(shell);
        // ShellUtil contract (as used here): INVOKE_STATUS=true on success, SHELL_ERR_RESULT holds stderr.
        if (!result.containsKey("INVOKE_STATUS") || !(boolean) result.get("INVOKE_STATUS")) {
            throw new RuntimeException("HDFS数据文件下载到本地异常: " + result.get("SHELL_ERR_RESULT"));
        }
    }

    /**
     * Removes the temporary HDFS staging directory produced during the export.
     * Best-effort: the shell result is intentionally not checked.
     *
     * @param dataExport export parameters (HDFS dir)
     */
    public void removeTempHdfsPath(DataExport dataExport) {
        // NOTE(review): hdfsDir is concatenated into a sudo shell command; it must be a
        // trusted, well-formed path — confirm it cannot contain user-supplied content.
        String shell = StrUtil.format("sudo -u hdfs hadoop fs -rm -r -f {}", dataExport.getHdfsDir());
        ShellUtil.exec(shell);
    }

    /**
     * Builds the beeline command line, including credentials only when both a
     * username and a password are configured.
     *
     * @param dataExport export parameters (Hive credentials)
     * @param jdbcUrl    Hive JDBC connection URL
     * @param sql        statement to execute via {@code -e}
     * @return the argv array to hand to the shell executor
     */
    private String[] getShellCmd(DataExport dataExport, String jdbcUrl, String sql) {
        if (StrUtil.isNotBlank(dataExport.getHiveUserName()) && StrUtil.isNotBlank(dataExport.getHivePassword())) {
            return new String[]{"beeline", "-u", jdbcUrl, "-n", dataExport.getHiveUserName(), "-p", dataExport.getHivePassword(), "-e", sql};
        }
        return new String[]{"beeline", "-u", jdbcUrl, "-e", sql};
    }
}
