package com.central.scheduler.plugin.task.datax;

import com.central.common.entity.DataxTaskContent;
import com.central.common.entity.ReaderParams;
import com.central.common.entity.WriterParams;
import com.central.common.enums.DbType;
import com.central.common.enums.PrivilegeType;
import com.central.common.enums.WriteMode;
import com.central.common.properties.Property;
import com.central.common.utils.OSUtils;
import com.central.scheduler.plugin.task.api.AbstractTaskExecutor;
import com.central.scheduler.plugin.task.api.ShellCommandExecutor;
import com.central.scheduler.plugin.task.api.TaskResponse;
import com.central.scheduler.plugin.task.datax.json.ReaderJson;
import com.central.scheduler.plugin.task.datax.json.WriterJson;
import com.central.scheduler.plugin.task.datax.json.reader.HiveReaderJson;
import com.central.scheduler.plugin.task.datax.json.reader.MysqlReaderJson;
import com.central.scheduler.plugin.task.datax.json.writer.HiveWriterJson;
import com.central.scheduler.plugin.task.datax.json.writer.MysqlWriterJson;
import com.central.scheduler.spi.task.AbstractParameters;
import com.central.scheduler.spi.task.TaskConstants;
import com.central.scheduler.spi.task.paramparser.ParamUtils;
import com.central.scheduler.spi.task.paramparser.ParameterUtils;
import com.central.scheduler.spi.task.request.DataxTaskExecutionContext;
import com.central.scheduler.spi.task.request.TaskRequest;
import com.central.scheduler.spi.utils.JSONUtils;
import com.central.scheduler.spi.utils.PropertyUtils;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.*;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.central.scheduler.spi.task.TaskConstants.EXIT_CODE_FAILURE;
import static com.central.scheduler.spi.task.TaskConstants.RWXR_XR_X;


public class DataxTask extends AbstractTaskExecutor {
    /**
     * python process (datax only supports version 2.7 by default)
     */
    private static final String DATAX_PYTHON = "python2.7";

    /**
     * matches a trailing "/bin/python[version]" segment, so PYTHON_HOME may point
     * either at an installation root or directly at an interpreter binary
     */
    private static final Pattern PYTHON_PATH_PATTERN = Pattern.compile("/bin/python[\\d.]*$");

    /**
     * datax launcher script path, resolved at run time from the DATAX_HOME environment variable
     */
    private static final String DATAX_PATH = "${DATAX_HOME}/bin/datax.py";

    /**
     * default datax channel count, used when no explicit speed channel is configured
     */
    private static final int DATAX_CHANNEL_COUNT = 1;

    /**
     * datax parameters parsed from the task content json
     */
    private DataxParameters dataXParameters;

    /**
     * shell command executor that runs the generated launch script
     */
    private ShellCommandExecutor shellCommandExecutor;

    /**
     * taskExecutionContext supplied by the scheduler
     */
    private TaskRequest taskExecutionContext;

    /**
     * constructor
     *
     * @param taskExecutionContext taskExecutionContext
     */
    public DataxTask(TaskRequest taskExecutionContext) {
        super(taskExecutionContext);
        this.taskExecutionContext = taskExecutionContext;

        this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle,
                taskExecutionContext, logger);
    }

    /**
     * init DataX config: parse the task content json into {@link DataxParameters}.
     */
    @Override
    public void init() {
        logger.info("datax task params {}", taskExecutionContext.getTaskContent());
        DataxTaskContent taskContent = JSONUtils.parseObject(taskExecutionContext.getTaskContent(), DataxTaskContent.class);
        dataXParameters = new DataxParameters();
        dataXParameters.setReader(taskContent.getReader());
        dataXParameters.setWriter(taskContent.getWriter());
        dataXParameters.setSpeedChannel(taskContent.getSpeedChannel());
        dataXParameters.setSpeedByte(taskContent.getSpeedByte());
        dataXParameters.setSpeedRecord(taskContent.getSpeedRecord());
        dataXParameters.setErrorLimitRecord(taskContent.getErrorLimitRecord());
        dataXParameters.setErrorLimitPercentage(taskContent.getErrorLimitPercentage());
    }

    /**
     * run DataX process: check table privileges, render the job json and the
     * launch script, then execute the script and record the result.
     *
     * @throws Exception if error throws Exception
     */
    @Override
    public void handle() throws Exception {
        try {
            // set the name of the current thread
            String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskExecutionContext.getTaskAppId());
            Thread.currentThread().setName(threadLoggerInfoName);

            // check table-level data privileges before doing any work
            checkDataPrivilege();

            // replace placeholder, and combine local and global parameters
            Map<String, Property> paramsMap = ParamUtils.convert(taskExecutionContext, getParameters());
            if (MapUtils.isEmpty(paramsMap)) {
                paramsMap = new HashMap<>();
            }
            if (MapUtils.isNotEmpty(taskExecutionContext.getParamsMap())) {
                paramsMap.putAll(taskExecutionContext.getParamsMap());
            }

            // run datax process
            String jsonFilePath = buildDataxJsonFile(paramsMap);
            String shellCommandFilePath = buildShellCommandFile(jsonFilePath);
            TaskResponse commandExecuteResult = shellCommandExecutor.run(shellCommandFilePath);

            setExitStatusCode(commandExecuteResult.getExitStatusCode());
            setAppIds(commandExecuteResult.getAppIds());
            setProcessId(commandExecuteResult.getProcessId());
        } catch (Exception e) {
            setExitStatusCode(EXIT_CODE_FAILURE);
            throw e;
        }
    }

    /**
     * Verify that the task owner holds the required table privileges on both the
     * reader table (SELECT) and the writer table (depends on datasource type and
     * write mode). No-op unless data privilege checking is enabled.
     *
     * @throws IllegalArgumentException when a required privilege is missing, or the
     *                                  writer datasource type / write mode is unsupported
     */
    private void checkDataPrivilege() {
        // table privilege checking can be switched off globally
        boolean dataPrivilegeEnable = PropertyUtils.getBoolean(TaskConstants.DATA_PRIVILEGE_ENABLE, false);
        if (!dataPrivilegeEnable) {
            return;
        }
        ReaderParams reader = dataXParameters.getReader();
        // key is datasourceId.dbName.tableName
        String readerKey = reader.getDatasourceId() + "." + reader.getDbName() + "." + reader.getTbName();
        Map<String, Set<PrivilegeType>> readerTablePrivileges = taskExecutionContext.getDataxTaskExecutionContext().getReaderTablePrivileges();
        Set<PrivilegeType> readerPrivileges = readerTablePrivileges.getOrDefault(readerKey, new HashSet<>());
        if (!readerPrivileges.contains(PrivilegeType.SELECT)) {
            logger.error(readerKey + " privilege denied,need privileges is SELECT ,user have privileges is " + JSONUtils.toJsonString(readerPrivileges));
            throw new IllegalArgumentException(readerKey + " privilege denied: SELECT is required");
        }

        WriterParams writer = dataXParameters.getWriter();
        // key is datasourceId.dbName.tableName
        String writerKey = writer.getDatasourceId() + "." + writer.getDbName() + "." + writer.getTbName();
        Map<String, Set<PrivilegeType>> writerTablePrivileges = taskExecutionContext.getDataxTaskExecutionContext().getWriterTablePrivileges();
        Set<PrivilegeType> writerPrivileges = writerTablePrivileges.getOrDefault(writerKey, new HashSet<>());
        Set<PrivilegeType> needPrivilege = writerNeededPrivileges();
        if (!writerPrivileges.containsAll(needPrivilege)) {
            // BUGFIX: report the writer key here (was mistakenly logging the reader key)
            logger.error(writerKey + " privilege denied,need privileges is " + JSONUtils.toJsonString(needPrivilege) + " ,user have privileges is " + JSONUtils.toJsonString(writerPrivileges));
            throw new IllegalArgumentException(writerKey + " privilege denied: need " + JSONUtils.toJsonString(needPrivilege));
        }
    }

    /**
     * Resolve the set of privileges the writer table requires for the configured
     * writer datasource type and write mode.
     *
     * @return required writer privileges (never empty)
     * @throws IllegalArgumentException for unsupported datasource types or write modes
     */
    private Set<PrivilegeType> writerNeededPrivileges() {
        WriteMode writeMode = dataXParameters.getWriter().getWriteMode();
        DbType writerDbType = taskExecutionContext.getDataxTaskExecutionContext().getWriterDatasource().getDatasourceType();
        Set<PrivilegeType> needPrivilege = new HashSet<>();
        if (writerDbType == DbType.HIVE) {
            // hive writing only supports plain inserts
            needPrivilege.add(PrivilegeType.INSERT);
        } else if (writerDbType == DbType.MYSQL) {
            switch (writeMode) {
                case insert:
                    needPrivilege.add(PrivilegeType.INSERT);
                    break;
                case replace:
                    // REPLACE may overwrite an existing row, so both privileges are required
                    needPrivilege.add(PrivilegeType.INSERT);
                    needPrivilege.add(PrivilegeType.UPDATE);
                    break;
                case update:
                    needPrivilege.add(PrivilegeType.UPDATE);
                    break;
                default:
                    logger.error(writerDbType + " 数据源类型 writeModel 模式错误");
                    throw new IllegalArgumentException("unsupported write mode: " + writeMode);
            }
        } else {
            logger.error(writerDbType + " 数据源类型暂不支持");
            throw new IllegalArgumentException("unsupported writer datasource type: " + writerDbType);
        }
        return needPrivilege;
    }

    /**
     * cancel DataX process
     *
     * @param cancelApplication cancelApplication
     * @throws Exception if error throws Exception
     */
    @Override
    public void cancelApplication(boolean cancelApplication)
            throws Exception {
        // cancel process
        shellCommandExecutor.cancelApplication();
    }

    /**
     * build datax configuration file in the task execute path; reuses an existing
     * file from a previous attempt if present.
     *
     * @param paramsMap parameters used to substitute placeholders in the job json
     * @return datax json file name
     * @throws Exception if error throws Exception
     */
    private String buildDataxJsonFile(Map<String, Property> paramsMap)
            throws Exception {
        // generate json
        String fileName = String.format("%s/%s_job.json",
                taskExecutionContext.getExecutePath(),
                taskExecutionContext.getTaskAppId());
        String json;

        Path path = new File(fileName).toPath();
        if (Files.exists(path)) {
            return fileName;
        }

        ObjectNode job = JSONUtils.createObjectNode();
        job.putArray("content").addAll(buildDataxJobContentJson());
        job.set("setting", buildDataxJobSettingJson());

        ObjectNode root = JSONUtils.createObjectNode();
        root.set("job", job);
        json = root.toString();

        // replace placeholder
        json = ParameterUtils.convertParameterPlaceholders(json, ParamUtils.convert(paramsMap));

        logger.debug("datax job json : {}", json);

        // create datax json file
        FileUtils.writeStringToFile(new File(fileName), json, StandardCharsets.UTF_8);
        return fileName;
    }

    /**
     * build datax job config: one content entry holding the reader and writer
     * plugin configurations. For Hive endpoints the kerberos keytab/krb5.conf
     * files are downloaded into the execute path first.
     *
     * @return collection of datax job config JSONObject
     * @throws IllegalArgumentException for unsupported reader/writer datasource types
     */
    private List<ObjectNode> buildDataxJobContentJson() {
        DataxTaskExecutionContext dataxTaskExecutionContext = taskExecutionContext.getDataxTaskExecutionContext();

        ObjectNode reader = JSONUtils.createObjectNode();
        DbType readerDbType = dataXParameters.getReader().getDbType();
        switch (readerDbType) {
            case MYSQL:
                reader.put("name", "mysqlreader");
                reader.putPOJO("parameter", new MysqlReaderJson(dataXParameters.getReader(), dataxTaskExecutionContext.getReaderDatasource()));
                break;
            case HIVE:
                reader.put("name", "hivereader");
                HiveReaderJson hiveReader = new HiveReaderJson(dataXParameters.getReader(), dataxTaskExecutionContext.getReaderDatasource());
                reader.putPOJO("parameter", hiveReader);
                // download keytab and conf file for kerberos authentication
                String readerKeytabUrl = dataxTaskExecutionContext.getReaderDatasource().getKeytabUrl();
                if (StringUtils.isNotEmpty(readerKeytabUrl)) {
                    logger.info("downloading keytab file ......");
                    hiveReader.setKerberosKeytabFilePath(downloadToExecutePath(readerKeytabUrl));
                    hiveReader.setHaveKerberos(true);
                }
                String readerKrb5ConfUrl = dataxTaskExecutionContext.getReaderDatasource().getKrb5ConfUrl();
                if (StringUtils.isNotEmpty(readerKrb5ConfUrl)) {
                    logger.info("downloading krb5Conf file ......");
                    hiveReader.setKerberosConfFilePath(downloadToExecutePath(readerKrb5ConfUrl));
                }
                break;
            default:
                logger.error(readerDbType + " 数据源类型暂不支持");
                throw new IllegalArgumentException("unsupported reader datasource type: " + readerDbType);
        }

        ObjectNode writer = JSONUtils.createObjectNode();
        DbType writerDbType = dataXParameters.getWriter().getDbType();
        switch (writerDbType) {
            case MYSQL:
                writer.put("name", "mysqlwriter");
                writer.putPOJO("parameter", new MysqlWriterJson(dataXParameters.getWriter(), dataxTaskExecutionContext.getWriterDatasource()));
                break;
            case HIVE:
                writer.put("name", "hivewriter");
                HiveWriterJson hiveWriter = new HiveWriterJson(dataXParameters.getWriter(), dataxTaskExecutionContext.getWriterDatasource());
                writer.putPOJO("parameter", hiveWriter);
                // download keytab and conf file for kerberos authentication
                String writerKeytabUrl = dataxTaskExecutionContext.getWriterDatasource().getKeytabUrl();
                if (StringUtils.isNotEmpty(writerKeytabUrl)) {
                    logger.info("downloading keytab file ......");
                    hiveWriter.setKerberosKeytabFilePath(downloadToExecutePath(writerKeytabUrl));
                    hiveWriter.setHaveKerberos(true);
                }
                String writerKrb5ConfUrl = dataxTaskExecutionContext.getWriterDatasource().getKrb5ConfUrl();
                if (StringUtils.isNotEmpty(writerKrb5ConfUrl)) {
                    logger.info("downloading krb5Conf file ......");
                    hiveWriter.setKerberosConfFilePath(downloadToExecutePath(writerKrb5ConfUrl));
                }
                break;
            default:
                // BUGFIX: report the writer type here (was mistakenly logging the reader type)
                logger.error(writerDbType + " 数据源类型暂不支持");
                throw new IllegalArgumentException("unsupported writer datasource type: " + writerDbType);
        }

        List<ObjectNode> contentList = new ArrayList<>();
        ObjectNode content = JSONUtils.createObjectNode();
        content.set("reader", reader);
        content.set("writer", writer);
        contentList.add(content);

        return contentList;
    }

    /**
     * Download a remote resource into the task execute path and return the
     * resulting local file path (the url's last path segment is used as the
     * local file name).
     *
     * @param url resource url to download
     * @return local path of the downloaded file inside the execute path
     */
    private String downloadToExecutePath(String url) {
        downloadResource(url);
        String[] segments = url.split("/");
        return this.taskExecutionContext.getExecutePath() + "/" + segments[segments.length - 1];
    }

    /**
     * build datax setting config: speed limits (channel/byte/record) and error limits.
     *
     * @return datax setting config JSONObject
     */
    private ObjectNode buildDataxJobSettingJson() {

        ObjectNode speed = JSONUtils.createObjectNode();

        // BUGFIX: channel must be gated on the configured channel count,
        // not on speedByte (copy-paste error made the channel setting dead)
        if (dataXParameters.getSpeedChannel() > 0) {
            speed.put("channel", dataXParameters.getSpeedChannel());
        } else {
            speed.put("channel", DATAX_CHANNEL_COUNT);
        }
        if (dataXParameters.getSpeedByte() > 0) {
            speed.put("byte", dataXParameters.getSpeedByte());
        }

        if (dataXParameters.getSpeedRecord() > 0) {
            speed.put("record", dataXParameters.getSpeedRecord());
        }

        ObjectNode errorLimit = JSONUtils.createObjectNode();
        errorLimit.put("record", dataXParameters.getErrorLimitRecord());
        errorLimit.put("percentage", dataXParameters.getErrorLimitPercentage());

        ObjectNode setting = JSONUtils.createObjectNode();
        setting.set("speed", speed);
        setting.set("errorLimit", errorLimit);

        return setting;
    }

    /**
     * create the shell command file that launches datax with the generated job
     * json; reuses an existing script from a previous attempt if present.
     *
     * @param jobConfigFilePath path of the datax job json file
     * @return shell command file name
     * @throws Exception if error throws Exception
     */
    private String buildShellCommandFile(String jobConfigFilePath)
            throws Exception {
        // generate scripts
        String fileName = String.format("%s/%s_node.%s",
                taskExecutionContext.getExecutePath(),
                taskExecutionContext.getTaskAppId(),
                OSUtils.isWindows() ? "bat" : "sh");

        Path path = new File(fileName).toPath();

        if (Files.exists(path)) {
            return fileName;
        }

        // datax python command
        StringBuilder sbr = new StringBuilder();
        sbr.append(getPythonCommand());
        sbr.append(" ");
        sbr.append(DATAX_PATH);
        sbr.append(" ");
        sbr.append(jobConfigFilePath);

        String dataxCommand = sbr.toString();

        logger.debug("raw script : {}", dataxCommand);

        // create shell command file with rwxr-xr-x permissions (posix only)
        Set<PosixFilePermission> perms = PosixFilePermissions.fromString(RWXR_XR_X);
        FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms);

        if (OSUtils.isWindows()) {
            Files.createFile(path);
        } else {
            Files.createFile(path, attr);
        }

        // BUGFIX: write with an explicit charset instead of the platform default
        Files.write(path, dataxCommand.getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);

        return fileName;
    }

    /**
     * Resolve the python command used to launch datax, from the PYTHON_HOME
     * environment variable.
     *
     * @return python command line prefix
     */
    public String getPythonCommand() {
        String pythonHome = System.getenv("PYTHON_HOME");
        return getPythonCommand(pythonHome);
    }

    /**
     * Resolve the python command from a PYTHON_HOME-style value. Falls back to
     * the bare {@code python2.7} command when no home is configured. A value
     * that already ends in "/bin/python[version]" is normalized to point at the
     * python 2.7 binary; otherwise "/bin/python2.7" is appended.
     *
     * @param pythonHome python installation root or interpreter path; may be null/empty
     * @return python command line prefix
     */
    public String getPythonCommand(String pythonHome) {
        if (StringUtils.isEmpty(pythonHome)) {
            return DATAX_PYTHON;
        }
        String pythonBinPath = "/bin/" + DATAX_PYTHON;
        Matcher matcher = PYTHON_PATH_PATTERN.matcher(pythonHome);
        if (matcher.find()) {
            return matcher.replaceAll(pythonBinPath);
        }
        return Paths.get(pythonHome, pythonBinPath).toString();
    }

    @Override
    public AbstractParameters getParameters() {
        return dataXParameters;
    }

    /**
     * Assert that a value is non-null, failing with the given message otherwise.
     *
     * @param obj     value to check
     * @param message error message used when {@code obj} is null
     * @throws RuntimeException when {@code obj} is null
     */
    private void notNull(Object obj, String message) {
        if (obj == null) {
            throw new RuntimeException(message);
        }
    }

    /**
     * download resource file into the task execute path (via wget)
     *
     * @param url url
     * @throws RuntimeException wrapping the underlying failure
     */
    private void downloadResource(String url) {
        try {
            // download the file with the wget command
            wgetCommand(url, this.taskExecutionContext.getExecutePath());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            // BUGFIX: preserve the cause instead of keeping only its message
            throw new RuntimeException(e.getMessage(), e);
        }
    }
}
