package com.central.scheduler.plugin.task.flink.sql;

import com.central.common.constant.CommonConstant;
import com.central.common.entity.*;
import com.central.common.enums.DbType;
import com.central.common.enums.PrivilegeType;
import com.central.common.model.ColumnInfo;
import com.central.common.model.ResourceInfo;
import com.central.common.utils.OSUtils;
import com.central.scheduler.plugin.task.api.AbstractTaskExecutor;
import com.central.scheduler.plugin.task.api.AbstractYarnTask;
import com.central.scheduler.plugin.task.api.ShellCommandExecutor;
import com.central.scheduler.plugin.task.api.TaskResponse;
import com.central.scheduler.plugin.task.datasource.PasswordUtils;
import com.central.scheduler.plugin.task.flink.sql.utils.FlinkArgsUtils;
import com.central.scheduler.spi.task.AbstractParameters;
import com.central.scheduler.spi.task.TaskConstants;
import com.central.scheduler.spi.task.paramparser.ParameterUtils;
import com.central.scheduler.spi.task.request.DataxTaskExecutionContext;
import com.central.scheduler.spi.task.request.TaskRequest;
import com.central.scheduler.spi.task.request.UdfFuncRequest;
import com.central.scheduler.spi.utils.JSONUtils;
import com.central.scheduler.spi.utils.PropertyUtils;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.SQLException;
import java.util.*;
import java.util.regex.Matcher;
import static com.central.scheduler.spi.task.TaskConstants.*;

/**
 * Flink SQL task: assembles a SQL job script (set params, UDF registration, source/sink
 * table DDL) and submits it through the flink CLI.
 *
 * @author Tindy
 * @date 2022/4/13
 * @describe flink sql task executor
 */
public class FlinkSqlTask extends AbstractYarnTask {

    /**
     * flink sql parameters, parsed from the raw task content in {@link #init()}
     */
    private FlinkSqlParameters flinkSqlParameters;

    /**
     * shell command executor
     * NOTE(review): never assigned or referenced in this class — looks like dead state, confirm before removing
     */
    private ShellCommandExecutor shellCommandExecutor;

    /**
     * task execution context supplied by the scheduler
     */
    private TaskRequest taskExecutionContext;

    /**
     * constructor
     *
     * @param taskExecutionContext taskExecutionContext
     */
    public FlinkSqlTask(TaskRequest taskExecutionContext) {
        super(taskExecutionContext);
        this.taskExecutionContext = taskExecutionContext;
    }

    /**
     * init flink sql config: parse the task content and resolve queue, main jar and extra jars
     */
    @Override
    public void init() {
        logger.info("flink sql task params {}", taskExecutionContext.getTaskContent());
        FlinkSqlTaskContent taskContent = JSONUtils.parseObject(taskExecutionContext.getTaskContent(), FlinkSqlTaskContent.class);
        flinkSqlParameters = new FlinkSqlParameters();
        flinkSqlParameters.setSql(taskContent.getSql());
        flinkSqlParameters.setSetParams(taskContent.getSetParams());
        flinkSqlParameters.setRunOptions(taskContent.getRunOptions());
        flinkSqlParameters.getRunOptions().setQueue(taskExecutionContext.getFlinkSqlTaskExecutionContext().getQueue());
        setMainJarName();
        setExtJarPaths();
    }

    /**
     * collect third-party jar urls (task resources plus udf jars) into the run options
     */
    private void setExtJarPaths() {
        List<String> extJarPaths = new ArrayList<>();
        // jars explicitly attached to the task
        for (ResourceInfo resource : taskExecutionContext.getResources()) {
            if (resource.getName().toLowerCase().endsWith(".jar")) {
                extJarPaths.add(resource.getFileUrl());
            }
        }
        // jars backing the user defined functions
        for (UdfFuncRequest udf : taskExecutionContext.getFlinkSqlTaskExecutionContext().getUdfs()) {
            extJarPaths.add(udf.getResourceUrl());
        }
        flinkSqlParameters.getRunOptions().setExtJarPaths(extJarPaths);
    }

    /**
     * build the {@code flink run [OPTIONS] <jar-file> <arguments>} command line
     *
     * @return the full command string
     */
    @Override
    protected String buildCommand() {
        // verify the task owner holds the required table privileges
        checkDataPrivilege();
        // materialize the sql script on disk
        String sqlFilePath;
        try {
            sqlFilePath = buildFlinkSqlFile();
        } catch (IOException e) {
            // cause preserved so the scheduler log shows the real I/O failure
            throw new RuntimeException(e);
        }
        flinkSqlParameters.getRunOptions().setMainArgs("-sql " + sqlFilePath);

        // flink run [OPTIONS] <jar-file> <arguments>
        List<String> args = new ArrayList<>();
        args.add(PropertyUtils.getString(FLINK_CMD_PATH, "flink"));
        args.add("run");
        // remaining flink cli options derived from the run options
        args.addAll(FlinkArgsUtils.buildArgs(flinkSqlParameters.getRunOptions()));

        logger.info("flink task args : {}", args);
        String command = String.join(" ", args);
        logger.info("flink task command : {}", command);
        return command;
    }

    /**
     * resolve the main jar and entry class of the generic flink-sql runner from configuration
     */
    @Override
    protected void setMainJarName() {
        flinkSqlParameters.getRunOptions().setMainJar(PropertyUtils.getString(FLINK_SQL_MAIN_JAR_PATH));
        flinkSqlParameters.getRunOptions().setMainClass(PropertyUtils.getString(FLINK_SQL_MAIN_CLASS, "com.flink.streaming.core.JobApplication"));
    }

    /**
     * fail fast when the task needs table privileges the user does not hold
     * (no-op unless the data-privilege switch is enabled in configuration)
     *
     * @throws IllegalArgumentException when a required privilege is missing
     */
    private void checkDataPrivilege() {
        boolean dataPrivilegeEnable = PropertyUtils.getBoolean(TaskConstants.DATA_PRIVILEGE_ENABLE, false);
        if (!dataPrivilegeEnable) {
            return;
        }
        List<Map<String, Set<PrivilegeType>>> needPrivileges = taskExecutionContext.getFlinkSqlTaskExecutionContext().getNeedPrivileges();
        Map<String, Set<PrivilegeType>> tablePrivileges = taskExecutionContext.getFlinkSqlTaskExecutionContext().getTablePrivileges();
        for (Map<String, Set<PrivilegeType>> needPrivilegeMap : needPrivileges) {
            for (Map.Entry<String, Set<PrivilegeType>> entry : needPrivilegeMap.entrySet()) {
                String table = entry.getKey();
                Set<PrivilegeType> needPrivilege = entry.getValue();
                Set<PrivilegeType> havePrivilege = tablePrivileges.get(table);
                if (havePrivilege == null || !havePrivilege.containsAll(needPrivilege)) {
                    logger.error("{} privilege denied, need privileges is {} ,user have privileges is {}",
                            table, JSONUtils.toJsonString(needPrivilege), JSONUtils.toJsonString(havePrivilege));
                    // FIX: carry a message so the failure is diagnosable from the stack trace alone
                    throw new IllegalArgumentException(table + " privilege denied");
                }
            }
        }
    }

    /**
     * build the flink sql file: SET statements, UDF creation, source/sink table DDL,
     * then the job sql itself
     *
     * @return flink sql file path
     * @throws IOException if writing the file fails
     */
    private String buildFlinkSqlFile() throws IOException {
        String fileName = String.format("%s/%s_job.sql",
                taskExecutionContext.getExecutePath(),
                taskExecutionContext.getTaskAppId());

        Path path = new File(fileName).toPath();
        if (Files.exists(path)) {
            // idempotent: reuse the script generated by a previous attempt
            return fileName;
        }
        StringBuilder sqls = new StringBuilder();
        // SET statements
        for (Map.Entry<String, String> prop : flinkSqlParameters.getSetParams().entrySet()) {
            sqls.append("set ").append(prop.getKey()).append(" = ").append(prop.getValue()).append(";\n");
        }

        // user defined functions
        for (UdfFuncRequest udf : taskExecutionContext.getFlinkSqlTaskExecutionContext().getUdfs()) {
            sqls.append("CREATE FUNCTION ").append(udf.getFuncName()).append(" AS '").append(udf.getClassName()).append("';\n");
        }

        // source/sink table DDL
        Map<String, FlinkTable> tableMap = taskExecutionContext.getFlinkSqlTaskExecutionContext().getTableMap();
        for (Map.Entry<String, FlinkTable> entry : tableMap.entrySet()) {
            String tableAlias = entry.getKey();
            FlinkTable table = entry.getValue();
            switch (table.getDatasource().getDatasourceType()) {
                case KAFKA:
                    sqls.append(buildKafkaTableSql(tableAlias, table));
                    break;
                case MYSQL:
                    sqls.append(buildMysqlTableSql(tableAlias, table));
                    break;
                default:
                    throw new IllegalArgumentException(table.getDatasource().getDatasourceType() + "数据源类型暂不支持");
            }
        }

        // the job sql itself
        sqls.append(flinkSqlParameters.getSql());

        // create sql file
        FileUtils.writeStringToFile(new File(fileName), sqls.toString(), StandardCharsets.UTF_8);
        return fileName;
    }

    /**
     * map a mysql column type to its flink sql counterpart, keeping the length/precision
     * argument for the parameterizable types (BINARY, CHAR, DECIMAL, VARBINARY, VARCHAR)
     *
     * @param column mysql column metadata
     * @return flink sql type literal, e.g. {@code VARCHAR(64)}
     * @throws IllegalArgumentException for mysql types without a flink mapping
     */
    private String mapMysqlTypeToFlink(ColumnInfo column) {
        String mysqlType = column.getDataType().toUpperCase();
        String flinkType;
        boolean keepArgs = false;
        switch (mysqlType) {
            case "BIGINT":
                flinkType = "BIGINT";
                break;
            case "BINARY":
                flinkType = "BINARY";
                keepArgs = true;
                break;
            case "BIT":
            case "SMALLINT":
                flinkType = "SMALLINT";
                break;
            case "BLOB":
            case "LONGBLOB":
            case "MEDIUMBLOB":
            case "TINYBLOB":
                flinkType = "BYTES";
                break;
            case "CHAR":
                flinkType = "CHAR";
                keepArgs = true;
                break;
            case "DATE":
                flinkType = "DATE";
                break;
            case "DECIMAL":
                flinkType = "DECIMAL";
                keepArgs = true;
                break;
            case "DOUBLE":
                // FIX: previously mapped to DECIMAL (no precision -> DECIMAL(10,0) in flink,
                // truncating fractional values); the flink JDBC connector mapping is DOUBLE
                flinkType = "DOUBLE";
                break;
            case "FLOAT":
                flinkType = "FLOAT";
                break;
            case "INT":
            case "MEDIUMINT":
                flinkType = "INT";
                break;
            case "ENUM":
            case "JSON":
            case "LINESTRING":
            case "MEDIUMTEXT":
            case "MULTILINESTRING":
            case "TEXT":
            case "TINYTEXT":
            case "YEAR":
                flinkType = "STRING";
                break;
            case "TIME":
                flinkType = "TIME";
                break;
            case "DATETIME":
            case "TIMESTAMP":
                flinkType = "TIMESTAMP";
                break;
            case "TINYINT":
                flinkType = "TINYINT";
                break;
            case "VARBINARY":
                flinkType = "VARBINARY";
                keepArgs = true;
                break;
            case "VARCHAR":
                flinkType = "VARCHAR";
                keepArgs = true;
                break;
            default:
                throw new IllegalArgumentException("mysql " + mysqlType + "数据类型未能匹配flinksql数据类型");
        }
        if (keepArgs && StringUtils.isNotEmpty(column.getArgs())) {
            return flinkType + "(" + column.getArgs() + ")";
        }
        return flinkType;
    }

    /**
     * build a CREATE TABLE statement for a mysql-backed table, using either the mysql-cdc
     * connector (cdc tables) or the plain jdbc connector
     *
     * @param tableAlias table name used inside the flink job
     * @param table      table metadata and datasource
     * @return the DDL, terminated by {@code ");\n"}
     */
    private String buildMysqlTableSql(String tableAlias, FlinkTable table) {
        StringBuilder sql = new StringBuilder("CREATE TABLE ");
        sql.append(tableAlias).append(" (\n");
        for (ColumnInfo column : table.getTableInfo().getColumns()) {
            sql.append("    ").append(column.getColumnName()).append(" ")
                    .append(mapMysqlTypeToFlink(column)).append(",\n");
        }
        if (CollectionUtils.isNotEmpty(table.getPrimaryKeys())) {
            sql.append("PRIMARY KEY (").append(String.join(",", table.getPrimaryKeys())).append(") NOT ENFORCED");
        } else {
            // no primary key: drop the trailing comma left after the last column
            sql.deleteCharAt(sql.length() - 2);
        }
        sql.append("\n)").append("WITH (\n");
        if (table.getIsCdc()) {
            sql.append("    'connector' = 'mysql-cdc',\n");
            sql.append("    'hostname' = '").append(table.getDatasource().getHost()).append("',\n");
            sql.append("    'port' = '").append(table.getDatasource().getPort()).append("',\n");
            sql.append("    'database-name' = '").append(table.getDbName()).append("',\n");
        } else {
            sql.append("    'connector' = 'jdbc',\n");
            sql.append("    'url' = 'jdbc:mysql://").append(table.getDatasource().getHost()).append(":")
                    .append(table.getDatasource().getPort()).append("/").append(table.getDbName())
                    .append("?characterEncoding=UTF-8',\n");
        }
        sql.append("    'username' = '").append(table.getDatasource().getUsername()).append("',\n");
        String password = PasswordUtils.decodePassword(table.getDatasource().getPassword());
        sql.append("    'password' = '").append(password).append("',\n");
        sql.append("    'table-name' = '").append(table.getTableInfo().getTableName()).append("',\n");
        for (String key : table.getWithParams().keySet()) {
            sql.append("    '").append(key).append("' = '").append(table.getWithParams().get(key)).append("',\n");
        }
        // drop the trailing comma of the last WITH option
        sql.deleteCharAt(sql.length() - 2);
        sql.append(");\n");
        return sql.toString();
    }

    /**
     * build a CREATE TABLE statement for a kafka-backed table (json format), including
     * optional kerberos settings and defaults for consumer group id / startup mode
     *
     * @param tableAlias table name used inside the flink job
     * @param table      table metadata and datasource
     * @return the DDL, terminated by {@code ");\n"}
     */
    private String buildKafkaTableSql(String tableAlias, FlinkTable table) {
        StringBuilder sql = new StringBuilder("CREATE TABLE ");
        sql.append(tableAlias).append(" (\n");
        for (ColumnInfo column : table.getTableInfo().getColumns()) {
            sql.append("    ").append(column.getColumnName()).append(" ").append(column.getDataType());
            if (StringUtils.isNotEmpty(column.getArgs())) {
                sql.append("(").append(column.getArgs()).append(")");
            }
            sql.append(",\n");
        }
        // drop the trailing comma left after the last column
        sql.deleteCharAt(sql.length() - 2);
        sql.append("\n) ").append("WITH (\n");
        sql.append("    'connector' = 'kafka',\n")
                .append("    'topic' = '").append(table.getDbName()).append(".").append(table.getTableInfo().getTableName()).append("',\n")
                .append("    'properties.bootstrap.servers' = '").append(table.getDatasource().getHost()).append("',\n")
                .append("    'format' = 'json',\n");
        if (StringUtils.isNotBlank(table.getDatasource().getKeytabUrl()) && StringUtils.isNotBlank(table.getDatasource().getKrb5ConfUrl())) {
            sql.append("    'properties.keytab.url' = '").append(table.getDatasource().getKeytabUrl()).append("',\n")
                    .append("    'properties.krb5conf.url' = '").append(table.getDatasource().getKrb5ConfUrl()).append("',\n")
                    .append("    'properties.principal' = '").append(table.getDatasource().getUsername()).append("',\n");
        }
        if (table.getWithParams().get("properties.group.id") == null) {
            // default the consumer group to the task name
            sql.append("    'properties.group.id' = '").append(taskExecutionContext.getTaskName()).append("',\n");
        }
        // FIX: this guard previously re-checked properties.group.id (copy-paste bug), so a
        // user-supplied scan.startup.mode produced a duplicate WITH option, and a user-supplied
        // group id silently suppressed the startup-mode default
        if (table.getWithParams().get("scan.startup.mode") == null) {
            sql.append("    'scan.startup.mode' = 'group-offsets',\n");
        }
        for (String key : table.getWithParams().keySet()) {
            sql.append("    '").append(key).append("' = '").append(table.getWithParams().get(key)).append("',\n");
        }
        // drop the trailing comma of the last WITH option
        sql.deleteCharAt(sql.length() - 2);
        sql.append(");\n");
        return sql.toString();
    }

    /**
     * create the shell (bat on windows) command file that kinits with the user keytab and
     * then launches flink on the generated sql file
     * NOTE(review): not invoked anywhere in this class — confirm it is still needed
     *
     * @param sqlFilePath path of the generated sql file
     * @return shell command file name
     * @throws Exception if creating the script or downloading the keytab fails
     */
    private String buildShellCommandFile(String sqlFilePath)
            throws Exception {
        String fileName = String.format("%s/%s_node.%s",
                taskExecutionContext.getExecutePath(),
                taskExecutionContext.getTaskAppId(),
                OSUtils.isWindows() ? "bat" : "sh");

        Path path = new File(fileName).toPath();
        if (Files.exists(path)) {
            // idempotent: reuse the script generated by a previous attempt
            return fileName;
        }

        // download the user keytab file into the execute path
        String userKtUrl = taskExecutionContext.getFlinkSqlTaskExecutionContext().getUserKtUrl();
        downloadResource(userKtUrl);
        String[] keytabPaths = userKtUrl.split("/");
        String keytabPath = this.taskExecutionContext.getExecutePath() + "/" + keytabPaths[keytabPaths.length - 1];

        StringBuilder script = new StringBuilder();
        script.append("kinit -kt ");
        script.append(keytabPath);
        script.append(" ");
        // kerberos principal: <prefix><project>_<user>/<prefix><project>
        script.append(CommonConstant.KA_PREFIX + taskExecutionContext.getProName() + "_" + taskExecutionContext.getUsername() + "/" + CommonConstant.KA_PREFIX + taskExecutionContext.getProName());
        script.append("\n");
        script.append(PropertyUtils.getString(FLINK_CMD_PATH, "flink"));
        script.append(" ");
        script.append(sqlFilePath);

        String command = script.toString();
        logger.debug("raw script : {}", command);

        // rwxr-xr-x so the worker can execute the script (posix only)
        Set<PosixFilePermission> perms = PosixFilePermissions.fromString(RWXR_XR_X);
        FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms);

        if (OSUtils.isWindows()) {
            Files.createFile(path);
        } else {
            Files.createFile(path, attr);
        }

        // FIX: encode with an explicit charset instead of the platform default
        Files.write(path, command.getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);

        return fileName;
    }

    @Override
    public AbstractParameters getParameters() {
        return flinkSqlParameters;
    }

    /**
     * download a resource file into the execute path via wget
     *
     * @param url resource url
     */
    private void downloadResource(String url) {
        try {
            wgetCommand(url, this.taskExecutionContext.getExecutePath());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            // FIX: keep the original exception as the cause instead of only its message
            throw new RuntimeException(e.getMessage(), e);
        }
    }
}
