package com.sh.data.engine.domain.datadev.offline.runner;

import cn.hutool.extra.spring.SpringUtil;
import com.alibaba.druid.DbType;
import com.alibaba.fastjson.JSONArray;
import com.google.common.collect.Lists;
import com.sh.data.engine.domain.authority.service.DataAuthService;
import com.sh.data.engine.domain.base.model.enums.Database;
import com.sh.data.engine.domain.common.service.CommonService;
import com.sh.data.engine.domain.dataasset.service.LineageService;
import com.sh.data.engine.domain.datadev.offline.model.domain.Param;
import com.sh.data.engine.domain.datadev.offline.model.enums.OfflineDevJobStatusEnum;
import com.sh.data.engine.domain.datadev.offline.service.DatadevFunctionService;
import com.sh.data.engine.domain.datadev.offline.service.DatadevScriptService;
import com.sh.data.engine.domain.shims.db.BaseDbManager;
import com.sh.data.engine.domain.shims.hive.util.HiveSqlParserUtil;
import com.sh.data.engine.domain.util.SqlParserUtil;
import com.sh.data.engine.domain.util.VarParserUtil;
import com.sh.data.engine.domain.workspace.manager.model.domain.UserDomain;
import com.sh.data.engine.domain.workspace.manager.service.UserService;
import com.sh.data.engine.repository.dao.authority.entity.ProjectTblEntity;
import com.sh.data.engine.repository.dao.datadev.entity.DatadevJobEntity;
import com.sh.data.engine.repository.dao.datadev.entity.DatadevMenuNodeEntity;
import com.sh.data.engine.repository.dao.datadev.entity.DatadevScriptEntity;
import com.sh.data.engine.repository.dao.workspace.manager.entity.StorageEntity;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hive.jdbc.HiveStatement;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Runner and SQL-processor strategies for executing offline-development SQL scripts.
 *
 * @author zigui.zdf
 * @since 2022/3/1
 */
@Slf4j
public class OfflineDevSqlJobRunner extends OfflineDevBaseJobRunner {

    private final CommonService commonService = SpringUtil.getBean(CommonService.class);

    private final LineageService lineageService = SpringUtil.getBean(LineageService.class);

    // Only referenced from permission checks that are currently disabled; kept for re-enabling.
    private final UserService userService = SpringUtil.getBean(UserService.class);

    /** "belong" marker meaning a script targets master data (主数据). */
    private static final String BELONG_MDDATA = "2";

    protected DatadevScriptService datadevScriptService =
        SpringUtil.getBean(DatadevScriptService.class);

    /** Active JDBC connection; volatile so cancel() invoked from another thread sees it. */
    private volatile Connection connection;

    /** Script to run; resolved lazily from the menu node id when not injected. */
    private DatadevScriptEntity datadevScriptEntity;

    protected DataAuthService dataAuthService = SpringUtil.getBean(DataAuthService.class);

    /** Creates a runner that resolves the node's script from the database on first run. */
    public OfflineDevSqlJobRunner(
        DatadevJobEntity jobEntity,
        DatadevMenuNodeEntity menuNodeEntity,
        Map<Long, Integer> parentNodeIdAndLineStatusMap,
        List<Long> childNodeIdList,
        Map<Long, OfflineDevBaseJobRunner> allJobNodeMap,
        ExecutorService executorService) {
        super(
            jobEntity,
            menuNodeEntity,
            parentNodeIdAndLineStatusMap,
            childNodeIdList,
            allJobNodeMap,
            executorService);
    }

    /** Creates a runner with a pre-resolved script entity (skips the DB lookup). */
    public OfflineDevSqlJobRunner(
        DatadevJobEntity jobEntity,
        DatadevMenuNodeEntity menuNodeEntity,
        Map<Long, Integer> parentNodeIdAndLineStatusMap,
        List<Long> childNodeIdList,
        Map<Long, OfflineDevBaseJobRunner> allJobNodeMap,
        ExecutorService executorService,
        DatadevScriptEntity datadevScriptEntity) {
        super(
            jobEntity,
            menuNodeEntity,
            parentNodeIdAndLineStatusMap,
            childNodeIdList,
            allJobNodeMap,
            executorService);
        this.datadevScriptEntity = datadevScriptEntity;
    }

    /** Cancels the job by closing the JDBC connection, which aborts in-flight statements. */
    @Override
    public void cancel() {
        try {
            if (null != connection && !connection.isClosed()) {
                connection.close();
            }
        } catch (SQLException e) {
            log.error("停止失败", e);
        }
    }

    /**
     * Executes the node's SQL script: substitutes parameters, splits the script into
     * statements, executes them sequentially, and records lineage per statement.
     *
     * @return an {@link OfflineDevJobStatusEnum} code: SUCCESS when all statements ran
     *     (or there was nothing to run); AUTO_INTERRUPT when a statement would modify
     *     master data
     * @throws Exception if a statement fails to execute
     */
    @Override
    public int runJob() throws Exception {

        this.writeLog(Lists.newArrayList(
            formatLogWithTime(String.format("节点:[%s]开始执行", menuNodeEntity.getName()))));

        String finishedLog = String.format("节点:[%s]结束执行", menuNodeEntity.getName());
        if (datadevScriptEntity == null) {
            datadevScriptEntity = datadevScriptService.getDatadevScriptByNodeId(menuNodeEntity.getId());
        }

        // Nothing configured on this node: report and finish successfully.
        // (Previously this null check was duplicated; once is enough.)
        if (null == datadevScriptEntity) {
            writeReasonAndFinish("该节点无配置", finishedLog);
            return OfflineDevJobStatusEnum.SUCCESS.getCode();
        }

        Long projectId = jobEntity.getProjectId();

        final StorageEntity storage = commonService.getStorage(projectId);
        DbType dbType = resolveDbType(storage.getStorageType());

        String script = datadevScriptEntity.getScript();
        List<Param> paramList = JSONArray.parseArray(datadevScriptEntity.getParam(), Param.class);
        final BaseDbManager baseDbManager =
            commonService.getStorageDbManagerByProjctId(projectId);
        Database database = Database.from(storage.getStorageType());

        if (CollectionUtils.isEmpty(paramList)) {
            paramList = Lists.newArrayList();
        }
        // paramName -> paramValue map used to substitute placeholders in the script.
        final Map<String, String> paramMap =
            paramList.stream().collect(Collectors.toMap(Param::getParamName, Param::getParamValue));

        script = VarParserUtil.process(script, paramMap, null);

        // Both branches of the old sapdb special case called split() identically.
        List<String> scripts = SqlParserUtil.split(script, dbType);
        final SqlProcessor sqlProcessor = SqlFormatFactory.getSqlFormat(dbType);
        final boolean isHiveType = database.equals(Database.HIVE);

        List<String> runnableScripts = Lists.newArrayList();
        List<String> functionSQL = Lists.newArrayList();
        for (String sc : scripts) {
            // Drop the trailing semicolon: some drivers reject it on single statements.
            if (StringUtils.trim(sc).endsWith(";")) {
                sc = sc.substring(0, sc.lastIndexOf(";"));
            }
            runnableScripts.add(sc);
            // For Hive this yields CREATE FUNCTION statements for any UDFs the SQL uses.
            functionSQL.addAll(sqlProcessor.format(sc, projectId, true));
        }
        if (CollectionUtils.isEmpty(runnableScripts)) {
            writeReasonAndFinish("没有可执行的脚本", finishedLog);
            return OfflineDevJobStatusEnum.SUCCESS.getCode();
        }

        Function<List<String>, Void> writeLogFn =
            (List<String> logs) -> {
                this.writeLog(logs);
                return null;
            };

        AtomicInteger sqlIndex = new AtomicInteger(1);
        if (isHiveType) {
            // UDF registration statements must run before the user's own statements.
            functionSQL.addAll(runnableScripts);
            runnableScripts = functionSQL;
        }
        try {
            connection = baseDbManager.getConnection();
            for (String runnableScript : runnableScripts) {
                if (isStop) {
                    break;
                }
                // TODO(review): belong is hard-coded to "1", so this master-data guard can
                // never trigger; it likely should be read from the script entity — confirm.
                String belong = "1";
                if (belong.equals(BELONG_MDDATA)) {
                    if (lineageService.checkSQLUpdateMd(runnableScript, database)) {
                        writeReasonAndFinish("主数据不支持update、alter等操作", finishedLog);
                        return OfflineDevJobStatusEnum.AUTO_INTERRUPT.getCode();
                    }
                }

                // For manual executions (execType == 0), rewrite plain SELECTs to add a LIMIT.
                // Null-check avoids an unboxing NPE when execType is unset.
                Integer execType = jobEntity.getExecType();
                if (execType != null && execType == 0) {
                    try {
                        String executeSQL = lineageService.checkIsSelectSql(runnableScript, database);
                        if (StringUtils.isNotBlank(executeSQL)) {
                            runnableScript = executeSQL;
                        }
                    } catch (Exception e) {
                        log.error("{} 判断SQL语句失败,不影响SQL执行", e.getMessage());
                    }
                }

                sqlProcessor.executeSql(
                    this,
                    runnableScript,
                    getJobEntity().getExecType(),
                    connection,
                    sqlIndex,
                    writeLogFn);

                sqlIndex.incrementAndGet();
                // Lineage analysis is best-effort: failures are logged but never fail the job.
                try {
                    lineageSqlRelation(runnableScript, database, projectId, jobEntity);
                } catch (RuntimeException e) {
                    writeLog(
                        Lists.newArrayList(String.format("解析血缘失败: %s ，不影响任务", e.getMessage())));
                    log.error("{} 解析血缘失败", e.getMessage());
                }
            }
        } finally {
            // Always release the connection, even on failure or manual stop.
            this.cancel();
        }
        return OfflineDevJobStatusEnum.SUCCESS.getCode();
    }

    /** Writes a reason line plus the "finished" line to the job log, both timestamped. */
    private void writeReasonAndFinish(String reason, String finishedLog) {
        this.writeLog(Lists.newArrayList(formatLogWithTime(reason), formatLogWithTime(finishedLog)));
    }

    /**
     * Maps a storage type name onto the Druid {@link DbType} used for SQL parsing.
     * Handles legacy aliases (sap→sapdb, kingbase8→kingbase, DM→dm) and treats TiDB
     * as MySQL-compatible.
     *
     * @throws IllegalStateException if the storage type is unknown to Druid
     *     (previously this surfaced as an opaque NullPointerException)
     */
    private static DbType resolveDbType(String storageType) {
        if (storageType.equalsIgnoreCase("sap")) {
            storageType = "sapdb";
        } else if (storageType.equalsIgnoreCase("kingbase8")) {
            storageType = "kingbase";
        } else if (storageType.equalsIgnoreCase("DM")) {
            storageType = "dm";
        }
        DbType dbType = DbType.of(storageType.toLowerCase());
        if (dbType == null) {
            throw new IllegalStateException("不支持的存储类型: " + storageType);
        }
        return dbType == DbType.tidb ? DbType.mysql : dbType;
    }

    /**
     * Records SQL lineage for one executed statement. SAP has a dedicated analyzer
     * because its dialect is not handled by the Druid-based parser.
     */
    private void lineageSqlRelation(
        String runnableScript, Database database, Long projectId, DatadevJobEntity jobEntity) {
        if (Database.SAP.equals(database)) {
            lineageService.analySapLineageAndInsert(runnableScript, projectId, jobEntity);
        } else {
            lineageService.analyseAndInsertSqlLineage(runnableScript, database, projectId, jobEntity);
        }
    }

    /**
     * Checks that the job creator may read and write every table referenced by the
     * given scripts; returns false on the first table whose auth flag denies access.
     * NOTE(review): auth "2" appears to mean "denied" and "1"/null "allowed" —
     * confirm against DataAuthService. Currently only referenced by disabled code.
     */
    private boolean checkUserAndTableAuth(List<String> runnableScripts, Database database) {

        Long projectId = jobEntity.getProjectId();
        for (String runnableScript : runnableScripts) {
            Map<String, Set<Long>> authMap =
                lineageService.analyseSqlUsedAuthority(runnableScript, database, projectId);

            List<Long> readList = Lists.newArrayList(authMap.get("read"));
            for (ProjectTblEntity tbl :
                dataAuthService.getSomeTblAuth(projectId, jobEntity.getCreatorId(), readList)) {
                if (tbl.getAuth().equals("2")) {
                    return false;
                }
            }

            List<Long> writeList = Lists.newArrayList(authMap.get("write"));
            for (ProjectTblEntity tbl :
                dataAuthService.getSomeTblAuth(projectId, jobEntity.getCreatorId(), writeList)) {
                if (Objects.isNull(tbl.getAuth()) || tbl.getAuth().equals("1")) {
                    continue;
                }
                return false;
            }
        }
        return true;
    }
}

interface SqlProcessor {

    /**
     * Pre-processes one SQL statement before execution.
     *
     * @param sql       a single SQL statement (trailing semicolon already removed by the caller)
     * @param projectId project the statement runs under
     * @param flag      when true, implementations may emit auxiliary statements
     *                  (e.g. Hive CREATE FUNCTION statements for referenced UDFs)
     * @return auxiliary statements that must run before {@code sql}; may be empty
     */
    List<String> format(String sql, Long projectId, boolean flag);

    /**
     * Executes one SQL statement on the given connection.
     *
     * @param offlineDevBaseJobRunner runner used for result persistence and log writing
     * @param sql              the statement to execute
     * @param execType         execution mode; 0 is manual execution, for which query
     *                         results are persisted via {@code toResult}
     * @param connection       open JDBC connection to the project's storage
     * @param fileIndex        1-based statement index used to name the persisted result
     * @param writeLogFunction sink for execution log lines
     * @throws Exception if the statement fails to execute
     */
    void executeSql(
        OfflineDevBaseJobRunner offlineDevBaseJobRunner,
        String sql,
        int execType,
        Connection connection,
        AtomicInteger fileIndex,
        Function<List<String>, Void> writeLogFunction)
        throws Exception;
}

/**
 * Executes SQL against Hive via {@link HiveStatement}, streaming server-side
 * query logs back through the supplied log writer while the statement runs.
 */
@Slf4j
class HiveSqlProcessor implements SqlProcessor {

    private DatadevFunctionService offlineDevFunctionService =
        SpringUtil.getBean(DatadevFunctionService.class);

    /**
     * When {@code flag} is true, extracts the UDF names used by {@code sql} and
     * returns the CREATE FUNCTION statements needed to register them; otherwise
     * (or when no UDFs are used) returns an empty list.
     */
    @Override
    public List<String> format(String sql, Long projectId, boolean flag) {
        List<String> sqls = new ArrayList<>();

        if (flag) {
            Set<String> functions;
            try {
                functions = HiveSqlParserUtil.getFunction(sql);
            } catch (ParseException e) {
                log.error("从sql中提取函数异常", e);
                throw new RuntimeException(e);
            }

            if (CollectionUtils.isNotEmpty(functions)) {
                sqls.addAll(
                    offlineDevFunctionService.getHiveCreateFunctionSql(
                        functions, projectId, new HashSet<>()));
            }
        }
        return sqls;
    }

    /**
     * Runs {@code sql} on a Hive statement. Server logs are streamed on a background
     * thread; the latch guarantees the log stream is drained before returning or
     * rethrowing, so log lines are not lost on failure.
     */
    @Override
    public void executeSql(
        OfflineDevBaseJobRunner offlineDevBaseJobRunner,
        String sql,
        int execType,
        Connection connection,
        AtomicInteger fileIndex,
        Function<List<String>, Void> writeLogFunction)
        throws Exception {

        DatadevJobEntity jobEntity = offlineDevBaseJobRunner.getJobEntity();
        CountDownLatch latch = new CountDownLatch(1);
        try (HiveStatement stmt = (HiveStatement) connection.createStatement()) {
            processLog(stmt, latch, writeLogFunction);
            final boolean hasRs = stmt.execute(sql);
            // Workflow tasks (taskFlowJobId set) and non-manual runs don't persist results.
            if (Objects.isNull(jobEntity.getTaskFlowJobId()) && hasRs && execType == 0) {
                // try-with-resources: close the ResultSet even if toResult throws.
                try (ResultSet rs = stmt.getResultSet()) {
                    offlineDevBaseJobRunner.toResult(rs, fileIndex.get(), sql);
                }
            }
            await(latch);
        } catch (Exception e) {
            await(latch); // drain remaining server logs before surfacing the error
            offlineDevBaseJobRunner.writeLog(Lists.newArrayList(e.getMessage()));
            throw e;
        }
    }

    /** Waits for the log-reader thread to finish; interruption is logged, not rethrown. */
    private void await(CountDownLatch latch) {
        try {
            latch.await();
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Starts a background thread that polls {@code stmt} for incremental query logs
     * and forwards them to {@code writeLogFunction}; counts down {@code latch} when
     * the statement is closed or has no more logs.
     */
    private void processLog(
        HiveStatement stmt, CountDownLatch latch, Function<List<String>, Void> writeLogFunction) {
        Thread logReader =
            new Thread(
                () -> {
                    try {
                        while (!stmt.isClosed() && stmt.hasMoreLogs()) {
                            List<String> logList = stmt.getQueryLog();
                            if (CollectionUtils.isEmpty(logList)) {
                                try {
                                    // 100 ms poll interval. The original slept 100 MICROseconds,
                                    // which effectively busy-spun against the server.
                                    TimeUnit.MILLISECONDS.sleep(100);
                                } catch (InterruptedException e) {
                                    Thread.currentThread().interrupt();
                                    break; // stop polling when interrupted
                                }
                                continue;
                            }
                            writeLogFunction.apply(logList);
                        }
                    } catch (Exception e) {
                        log.error("日志读取失败{}", e.getMessage());
                    } finally {
                        latch.countDown();
                    }
                });
        logReader.setName("hive-query-log-reader");
        logReader.start();
    }
}

/**
 * Executes SQL against non-Hive storages via a plain JDBC PreparedStatement,
 * collecting an execution summary (affected rows, status, elapsed time) into the log.
 */
@Slf4j
class OtherSqlProcessor implements SqlProcessor {

    /** Non-Hive storages need no preprocessing: the statement is returned as-is. */
    @Override
    public List<String> format(String sql, Long projectId, boolean flag) {
        return Lists.newArrayList(sql);
    }

    /**
     * Runs {@code sql}. Manual executions (execType == 0) outside a workflow persist
     * the result set; everything else logs the affected-row count instead.
     */
    @Override
    public void executeSql(
        OfflineDevBaseJobRunner offlineDevBaseJobRunner,
        String sql,
        int execType,
        Connection connection,
        AtomicInteger fileIndex,
        Function<List<String>, Void> writeLogFunction)
        throws Exception {
        DatadevJobEntity jobEntity = offlineDevBaseJobRunner.getJobEntity();
        String querySql = "\n> execute sql : " + sql;
        String time = DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss.SSS");
        List<String> logInfo = Lists.newArrayList(String.format("[%s] %s", time, querySql));
        try (PreparedStatement preparedStatement = connection.prepareStatement(sql)) {
            // Start timing BEFORE execute(): previously the clock started after it,
            // so the reported "query time" excluded the statement execution itself.
            long start = System.currentTimeMillis();
            boolean hasRs = preparedStatement.execute();
            if (Objects.isNull(jobEntity.getTaskFlowJobId()) && hasRs && execType == 0) {
                // try-with-resources: close the ResultSet even if toResult throws.
                try (ResultSet rs = preparedStatement.getResultSet()) {
                    offlineDevBaseJobRunner.toResult(rs, fileIndex.get(), sql);
                }
            } else {
                try {
                    logInfo.add("> Affected rows ：" + preparedStatement.getUpdateCount());
                } catch (Exception e) {
                    log.error(e.getMessage());
                }
            }
            long end = System.currentTimeMillis();
            logInfo.add("> execute status: ok ");
            logInfo.add("> query time: " + (end - start) / 1000.0d + " s");
            writeLogFunction.apply(logInfo);
        } catch (Exception e) {
            logInfo.add(e.getMessage());
            offlineDevBaseJobRunner.writeLog(logInfo);
            throw e;
        }
    }
}

/** Chooses the {@link SqlProcessor} implementation matching a database type. */
class SqlFormatFactory {

    /**
     * Returns the Hive-specific processor for {@code DbType.hive} (UDF registration,
     * server-side log streaming); every other database uses the generic JDBC processor.
     */
    static SqlProcessor getSqlFormat(DbType dbType) {
        return dbType.equals(DbType.hive) ? new HiveSqlProcessor() : new OtherSqlProcessor();
    }
}
