package com.powerdata.system.paimon.impl;

import cn.hutool.core.date.DateUtil;
import com.powerdata.common.utils.SecurityUtils;
import com.powerdata.common.utils.StringUtils;
import com.powerdata.common.utils.exec.PDJschUtils;

import com.powerdata.core.paimon.engine.PDPaimonExecUtils;
import com.powerdata.core.paimon.engine.PDPaimonFlinkUtils;
import com.powerdata.core.paimon.engine.PDPaimonSparkUtils;
import com.powerdata.core.paimon.PDPaimonUtils;
import com.powerdata.system.domain.PaimonExecLog;
import com.powerdata.system.domain.param.PaimonCatalogParam;
import com.powerdata.system.domain.param.PaimonCopyTableParam;
import com.powerdata.system.domain.param.PaimonTableParam;
import com.powerdata.system.mapper.PaimonExecLogMapper;
import com.powerdata.system.paimon.ISqlExecuteService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.ObjectUtils;

import javax.annotation.Resource;
import java.io.*;
import java.util.*;

/**
 * @author deeprado
 * @version 1.0
 * @description
 * @date 2023/6/16 15:04
 */
@Service
public class SqlExecuteServiceImpl implements ISqlExecuteService {

    private static final Logger log = LoggerFactory.getLogger(SqlExecuteServiceImpl.class);

    // The current way of fetching execution logs is crude and quite problematic
    // (it scrapes the shared application log file, see getLog); it will be reworked later.
    public static String sqlLogPath = "./logs/sys-info.log";

    // Hive conf file path handed to PDPaimonUtils.build — presumably the hive-site
    // upload location; TODO confirm against PDPaimonUtils.
    @Value(value = "${paimonManager.hiveConf}")
    private String uploadHiveFilePath;
    // User name passed to the Paimon/Hadoop engine builders.
    @Value(value = "${paimonManager.hadoopUser}")
    private String hadoopUser;

    // Spark installation directory on the remote exec host (used to build the spark-sql command).
    @Value(value = "${paimonManager.sparkHome}")
    private String sparkHome;
    // SSH connection settings for the spark-on-yarn exec host (see PDJschUtils.build).
    @Value(value = "${paimonManager.sparkExecHost}")
    private String sparkExecHost;
    @Value(value = "${paimonManager.sparkExecPort}")
    private String sparkExecPort;
    @Value(value = "${paimonManager.sparkExecUser}")
    private String sparkExecUser;
    @Value(value = "${paimonManager.sparkExecPassWd}")
    private String sparkExecPassWd;
    // Persistence for the per-statement execution audit log.
    @Resource
    private PaimonExecLogMapper paimonExecLogMapper;

    /**
     * Executes the SQL carried by {@code paimonCatalogParam} and returns, per statement, a map of
     * {@code statement -> {data, execTime, log}}. Every statement is also recorded in the
     * paimon_exec_log table (status: 0=running, 1=failed, 2=success).
     *
     * Two paths:
     * <ul>
     *   <li>non-local mode + spark engine: a single statement is shipped over SSH to the
     *       spark-on-yarn exec host and run through spark-sql (see {@link #initExecSql});</li>
     *   <li>otherwise: statements run in-process through the Spark or Flink Paimon engine.</li>
     * </ul>
     *
     * @param paimonCatalogParam catalog/connection info plus the raw SQL text
     * @return ordered result map keyed by the executed statement
     * @throws Exception when the remote spark-on-yarn execution fails, or when more than one
     *                   statement is submitted to the spark-on-yarn path
     */
    @Override
    public Map<String, Object> executeSql(PaimonCatalogParam paimonCatalogParam) throws Exception {
        LinkedHashMap<String, Object> resultMap = new LinkedHashMap<>();
        String execType = paimonCatalogParam.getExecType();
        String mode = paimonCatalogParam.getMode();
        PDPaimonExecUtils pdPaimonExecUtils = null;
        PaimonExecLog paimonExecLog = new PaimonExecLog();

        // Remote path: run a single spark-sql command on the yarn exec host over SSH.
        if (!"local".equals(mode) && "spark".equals(execType)) {
            Date begin = new Date();
            paimonExecLog.setId(begin.getTime() + "");
            paimonExecLog.setOther2(paimonCatalogParam.getId() + "," + paimonCatalogParam.getDatabaseName() + "," + execType + "," + mode);
            paimonExecLog.setCreater(SecurityUtils.getLoginUser().getUsername());
            paimonExecLog.setStatus("0");
            paimonExecLog.setCreatetime(begin.getTime() + "");
            paimonExecLog.setExectimes("-");
            paimonExecLog.setSqlstr(paimonCatalogParam.getExecSql());
            paimonExecLog.setOther1("-");
            paimonExecLogMapper.insertSelective(paimonExecLog);

            if (paimonCatalogParam.getExecSql().contains(";")) {
                throw new Exception("sparkOnYarn执行机不支持执行多条sql命令");
            }
            PDJschUtils execOnYarn = null;
            HashMap<String, Object> dataMap = new HashMap<>();
            try {
                execOnYarn = PDJschUtils.build(sparkExecHost, Integer.parseInt(sparkExecPort), sparkExecUser, sparkExecPassWd);
                String execSql = initExecSql(paimonCatalogParam);
                Map<String, String> execResult = execOnYarn.exec(execSql);
                String runLog = execResult.get("runLog");
                String errLog = execResult.get("errLog");
                if ("0".equals(execResult.get("resultKey"))) {
                    String[] responseCodes = runLog.split("Response code");
                    String[] timeTaken = errLog.split("Time taken:");
                    // Guard: treat a missing "Response code" marker as an empty payload instead of
                    // failing with ArrayIndexOutOfBoundsException as the old code did.
                    String responseData = responseCodes.length > 1 ? responseCodes[1] : "";
                    if (StringUtils.isEmpty(responseData)) {
                        dataMap.put("data", "sql执行成功");
                    } else {
                        List<Map<String, Object>> data = responseToData(responseData);
                        if (ObjectUtils.isEmpty(data) || data.size() == 0) {
                            dataMap.put("data", "sql执行成功");
                        } else {
                            dataMap.put("data", data);
                        }
                    }
                    dataMap.put("execTime", timeTaken[timeTaken.length - 1]);
                    dataMap.put("log", errLog);
                    paimonExecLog.setStatus("2");
                    paimonExecLog.setOther1(errLog + "\ndata:" + runLog);
                } else {
                    dataMap.put("data", errLog);
                    dataMap.put("log", errLog);
                    paimonExecLog.setStatus("1");
                    paimonExecLog.setOther1(errLog);
                }
            } catch (Exception e) {
                log.error("spark-on-yarn execution failed, sql: {}", paimonCatalogParam.getExecSql(), e);
                dataMap.put("log", e.getMessage());
                paimonExecLog.setStatus("1");
                // was e.getStackTrace().toString(), which only yields the array's identity hash
                paimonExecLog.setOther1(stackTraceToString(e));
                resultMap.put(paimonCatalogParam.getExecSql(), dataMap);
                paimonExecLog.setExectimes(DateUtil.formatBetween(begin, new Date()));
                paimonExecLogMapper.updateByPrimaryKeySelective(paimonExecLog);
                // preserve the original cause instead of rethrowing message-only
                throw new Exception(e.getMessage(), e);
            } finally {
                // Objects.requireNonNull(execOnYarn).close() used to throw NPE here (masking the
                // real error) whenever PDJschUtils.build itself failed.
                if (execOnYarn != null) {
                    execOnYarn.close();
                }
            }
            resultMap.put(paimonCatalogParam.getExecSql(), dataMap);

            paimonExecLog.setExectimes(DateUtil.formatBetween(begin, new Date()));
            paimonExecLogMapper.updateByPrimaryKeySelective(paimonExecLog);

            return resultMap;
        }

        // In-process path: pick the engine implementation for the requested execType.
        if ("spark".equals(execType)) {
            pdPaimonExecUtils =
                    PDPaimonSparkUtils.build(paimonCatalogParam.getId(), paimonCatalogParam.getTypes(),
                            paimonCatalogParam.getHiveurl(), paimonCatalogParam.getHdfsurl(),
                            paimonCatalogParam.getDatabaseName(), hadoopUser);
        } else if ("flink".equals(execType)) {
            pdPaimonExecUtils =
                    PDPaimonFlinkUtils.build(paimonCatalogParam.getId(), paimonCatalogParam.getTypes(),
                            paimonCatalogParam.getHiveurl(), paimonCatalogParam.getHdfsurl(),
                            paimonCatalogParam.getDatabaseName(), hadoopUser);
        } else {
            log.warn("unsupported execType: {}", execType);
        }
        if (pdPaimonExecUtils == null) {
            return resultMap;
        }
        paimonExecLog.setOther2(paimonCatalogParam.getId() + "," + paimonCatalogParam.getDatabaseName() + "," + execType + "," + mode);
        // Normalize the script: flatten newlines, collapse repeated blanks and empty statements.
        String sqlStr = paimonCatalogParam.getExecSql().replaceAll("\n", " ").trim();
        while (sqlStr.contains("  ") || sqlStr.contains(";;")) {
            sqlStr = sqlStr.replaceAll("  ", " ").replaceAll(";;", ";");
        }
        String[] sqlArr = sqlStr.split(";");
        // Select the target catalog/database before running the user's statements.
        if ("spark".equals(execType)) {
            pdPaimonExecUtils.executeSql("use `" + paimonCatalogParam.getId() + "`");
            pdPaimonExecUtils.executeSql("use `" + paimonCatalogParam.getDatabaseName() + "`");
        } else if ("flink".equals(execType)) {
            pdPaimonExecUtils.executeSql("use `" + paimonCatalogParam.getId() + "`.`" + paimonCatalogParam.getDatabaseName() + "` ");
        }
        int i = 0;
        for (String sql : sqlArr) {
            i++;
            HashMap<String, Object> dataMap = new HashMap<>();
            String execSql = sql.trim();
            // Statements starting with '#' are treated as comments and skipped.
            if (execSql.startsWith("#")) {
                continue;
            }
            Date begin = new Date();
            // The start timestamp doubles as the audit-log id and the log-scrape marker.
            String flag = begin.getTime() + "";

            paimonExecLog.setId(flag);
            paimonExecLog.setCreater(SecurityUtils.getLoginUser().getUsername());
            paimonExecLog.setStatus("0");
            paimonExecLog.setCreatetime(flag);
            paimonExecLog.setExectimes("-");
            paimonExecLog.setSqlstr(sql);
            paimonExecLog.setOther1("-");
            paimonExecLogMapper.insertSelective(paimonExecLog);

            log.info("执行sql：" + execSql);

            try {
                List<Map<String, Object>> sqlData = pdPaimonExecUtils.executeSql(execSql, flag);
                dataMap.put("execTime", DateUtil.formatBetween(begin, new Date()));
                dataMap.put("data", sqlData);
                if (ObjectUtils.isEmpty(sqlData) || sqlData.isEmpty()) {
                    dataMap.put("data", "sql执行成功");
                }
                paimonExecLog.setStatus("2");
            } catch (Exception e) {
                log.error("sql execution failed: {}", execSql, e);
                dataMap.put("execTime", DateUtil.formatBetween(begin, new Date()));
                dataMap.put("data", "sql执行失败：" + e.getMessage());
                paimonExecLog.setStatus("1");
                paimonExecLog.setOther1(e.getMessage());
            }
            // renamed from "log", which shadowed the class logger
            String execLogText = getLog(flag);
            dataMap.put("log", execLogText);
            resultMap.put("[" + i + "]" + execSql, dataMap);
            if ("2".equals(paimonExecLog.getStatus())) {
                // keep at most 4096 chars for the audit column
                paimonExecLog.setOther1(execLogText.substring(0, Math.min(execLogText.length(), 4096)));
            }

            paimonExecLog.setExectimes(dataMap.get("execTime").toString());
            paimonExecLogMapper.updateByPrimaryKeySelective(paimonExecLog);
        }
        return resultMap;
    }

    /**
     * Renders a throwable's full stack trace as a String (replacement for the broken
     * {@code e.getStackTrace().toString()} array call).
     */
    private static String stackTraceToString(Throwable t) {
        StringWriter sw = new StringWriter();
        t.printStackTrace(new PrintWriter(sw, true));
        return sw.toString();
    }

    /**
     * Parses the tab-separated payload produced by a remote spark-sql run into row maps.
     * Expected layout: line 0 is leading noise after the "Response code" marker, line 1 is the
     * header (column names), lines 2+ are data rows. Rows whose column count does not match the
     * header are skipped.
     *
     * @param responseData raw payload text (lines separated by '\n', fields by '\t')
     * @return one LinkedHashMap per well-formed data row, preserving column order; empty list
     *         when there is no header line
     */
    private List<Map<String, Object>> responseToData(String responseData) {
        ArrayList<Map<String, Object>> results = new ArrayList<>();

        String[] responseDataArr = responseData.split("\n");
        // Guard: without a header line (index 1) there is nothing to map — the old code
        // threw ArrayIndexOutOfBoundsException here.
        if (responseDataArr.length < 2) {
            return results;
        }
        String[] colNameArr = responseDataArr[1].split("\t");
        for (int i = 2; i < responseDataArr.length; i++) {
            LinkedHashMap<String, Object> resultMap = new LinkedHashMap<>();
            String[] dataArr = responseDataArr[i].split("\t");
            if (dataArr.length != colNameArr.length) {
                continue;
            }
            for (int j = 0; j < dataArr.length; j++) {
                resultMap.put(colNameArr[j], dataArr[j]);
            }
            results.add(resultMap);
        }
        return results;
    }

    /**
     * Builds the spark-sql command line that the spark-on-yarn exec host will run for the
     * given catalog. The catalog is registered either as a hive-metastore SparkSessionCatalog
     * or as a filesystem (hadoop) SparkCatalog, and the SQL is prefixed with a "use" statement
     * selecting the catalog and database.
     * NOTE(review): the SQL text is interpolated into a double-quoted -e argument without
     * escaping — safe only for trusted operator input; confirm before exposing more widely.
     *
     * @param paimonCatalogParam catalog id/type, hive/hdfs urls, database and SQL to run
     * @return the full spark-sql shell command
     */
    private String initExecSql(PaimonCatalogParam paimonCatalogParam) {
        String types = paimonCatalogParam.getTypes();
        String catalogId = paimonCatalogParam.getId();
        String sql = "use " + catalogId + "." + paimonCatalogParam.getDatabaseName() + ";"
                + paimonCatalogParam.getExecSql();

        StringBuilder cmd = new StringBuilder();
        cmd.append(sparkHome).append("/bin/spark-sql ");
        cmd.append("--master yarn ");
        cmd.append("--conf spark.sql.extensions=org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions ");
        String catalogConf = "--conf spark.sql.catalog." + catalogId;
        if ("hive".equals(types)) {
            // Hive-metastore-backed catalog.
            cmd.append(catalogConf).append("=org.apache.paimon.spark.SparkSessionCatalog ");
            cmd.append(catalogConf).append(".type=hive ");
        } else {
            // Filesystem catalog rooted at the configured warehouse path.
            cmd.append(catalogConf).append("=org.apache.paimon.spark.SparkCatalog ");
            cmd.append(catalogConf).append(".type=hadoop ");
            cmd.append(catalogConf).append(".warehouse=").append(paimonCatalogParam.getHdfsurl()).append(" ");
        }
        cmd.append("--hiveconf hive.cli.print.header=true ");
        cmd.append("-e \"").append(sql).append("\"");
        return cmd.toString();
    }

    /**
     * Extracts the log lines of one SQL execution from the shared application log file.
     * The slice starts at the line containing "begin{flag}" (inclusive) and ends at the line
     * containing "end{flag}" (exclusive), capped at 1000 lines.
     * NOTE(review): reads with the platform default charset (FileReader) — assumed UTF-8 on the
     * deployment host; confirm.
     *
     * @param flag execution marker (the start timestamp used as the audit-log id)
     * @return the captured log text, or an empty string when the marker is absent or the file
     *         cannot be read
     */
    private String getLog(String flag) {
        StringBuilder result = new StringBuilder();
        File file = new File(sqlLogPath);
        // try-with-resources replaces the old manual close, whose Objects.requireNonNull calls
        // threw NPE (masking the real IOException) whenever the FileReader failed to open.
        try (BufferedReader br = new BufferedReader(new FileReader(file))) {
            String line;
            int count = 0;
            boolean begin = false;
            while ((line = br.readLine()) != null) {
                if (begin || line.contains("begin" + flag)) {
                    begin = true;
                }
                if (!begin) {
                    continue;
                }
                // Stop at the end marker or after 1000 captured lines.
                if (count == 1000 || line.contains("end" + flag)) {
                    break;
                }
                count++;
                // StringBuilder instead of repeated String concatenation (was O(n^2)).
                result.append(line).append("\n");
            }
        } catch (IOException e) {
            log.error("failed to read sql log file {}", sqlLogPath, e);
        }
        return result.toString();
    }

    /**
     * Copies a table definition (and optionally its data) from one Paimon catalog/database to
     * another: replays the source's "show create table" DDL on the destination engine, then
     * optionally runs an {@code insert into ... select *}.
     *
     * @param paimonCopyTableParam source table, destination table, and whether to copy rows
     * @throws Exception when the source table no longer exists or the destination table
     *                   already exists
     */
    public void copyTable(PaimonCopyTableParam paimonCopyTableParam) throws Exception {
        PaimonTableParam sourceTable = paimonCopyTableParam.getSourceTable();
        PaimonTableParam dstTable = paimonCopyTableParam.getDstTable();

        // Fully-qualified names: catalog.database.table
        String sourceTableName = "" + sourceTable.getId() + "." + sourceTable.getDatabaseName() + "." + sourceTable.getTableName();
        String dstTableName = "" + dstTable.getId() + "." + dstTable.getDatabaseName() + "." + dstTable.getTableName();

        PDPaimonUtils sourcePaimonUtils = PDPaimonUtils.build(sourceTable.getId(), sourceTable.getTypes(),
                sourceTable.getHiveurl(), sourceTable.getHdfsurl(), hadoopUser, uploadHiveFilePath);
        if (!sourcePaimonUtils.isExitTable(sourceTable.getDatabaseName(), sourceTable.getTableName())) {
            throw new Exception("来源表已不存在，请重新选择");
        }
        PDPaimonUtils dstPaimonUtils = PDPaimonUtils.build(dstTable.getId(), dstTable.getTypes(),
                dstTable.getHiveurl(), dstTable.getHdfsurl(), hadoopUser, uploadHiveFilePath);
        if (dstPaimonUtils.isExitTable(dstTable.getDatabaseName(), dstTable.getTableName())) {
            // fixed duplicated word in the user-facing message ("已存在存在" -> "已存在")
            throw new Exception("复制的目标表已存在，请重新填写目标表");
        }

        String showSourceCreateTableSql = "show create table " + sourceTableName;
        PDPaimonSparkUtils pdPaimonSparkUtils =
                PDPaimonSparkUtils.build(sourceTable.getId(), sourceTable.getTypes(), sourceTable.getHiveurl(),
                        sourceTable.getHdfsurl(), sourceTable.getDatabaseName(), hadoopUser);
        PDPaimonSparkUtils dstPDPaimonSparkUtils =
                PDPaimonSparkUtils.build(dstTable.getId(), dstTable.getTypes(), dstTable.getHiveurl(),
                        dstTable.getHdfsurl(), dstTable.getDatabaseName(), hadoopUser);

        // Take the DDL up to (excluding) the LOCATION clause — presumably so the new table gets
        // its own warehouse path — and retarget it at the destination name.
        String createDstTableSql = pdPaimonSparkUtils.executeSql(showSourceCreateTableSql).get(0).
                get("createtab_stmt").toString().split("LOCATION")[0].replace(sourceTableName, dstTableName);
        dstPDPaimonSparkUtils.executeSql(createDstTableSql);

        if (paimonCopyTableParam.getData()) {
            String copyDataSql = "insert into " + dstTableName + " select * from " + sourceTableName;
            dstPDPaimonSparkUtils.executeSql(copyDataSql);
        }

    }

}
