package com.sh.data.engine.common.util;

import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.avatica.util.Quoting;
import org.apache.calcite.config.Lex;
import org.apache.calcite.sql.*;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.util.SqlShuttle;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.sql.parser.impl.FlinkSqlParserImpl;
import org.buildobjects.process.ProcBuilder;
import org.buildobjects.process.ProcResult;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;

@Slf4j
public class FlinkUtil {

    //  private static final String HDFS_CMD = "/usr/bin/hdfs";
    //
    //  private static final String YARN_CMD = "/usr/bin/yarn";

    // Local Flink installation: SQL client launcher.
    // NOTE(review): not referenced anywhere in this class — confirm still needed.
    private static final String FLINK_CMD = "/data/flink/flink-1.19.1/bin/sql-client.sh";

    // `flink` CLI binary used by startJob/stopJob/cancelJob.
    // NOTE(review): trailing space looks accidental; harmless because all fragments are
    // re-joined with SPACE_JOINER before execution, but worth confirming.
    private static final String FLINK_JAR = "/data/flink/flink-1.19.1/bin/flink ";

    // Fat jar containing the task client that gets submitted as the Flink program.
    private static final String FLINK_TASK_CLIENT_JAR =
        "/data/flink/data-engine-flink-task-client.jar";

    // Entry point class inside FLINK_TASK_CLIENT_JAR.
    private static final String FLINK_TASK_CLIENT_MAIN_CLASS =
        "com.sh.data.engine.flink.client.FlinkClient";

    // Base directories for checkpoint/savepoint state; a per-job "job_<engineJobId>"
    // subdirectory is appended by startJob/stopJob and the listing helpers.
    private static final String CHECK_POINTS_BASE_DIR = "/data/data_engine_file_storage/checkpoint";

    private static final String SAVE_POINTS_BASE_DIR = "/data/data_engine_file_storage/savepoint";

    // Number of retained checkpoints passed as state.checkpoints.num-retained.
    private static final Integer CHECK_POINTS_MAX_NUM = 20;

    // private static final String FLINK_APP_BASE_DIR = "/user/flink/.flink";

    // Flink configuration keys for state locations (set via -D on submission).
    private static final String SAVE_POINTS_KEY = "state.savepoints.dir";

    private static final String CHECK_POINTS_KEY = "state.checkpoints.dir";

    private static final String CHECK_POINTS_NUM_RETAINED = "state.checkpoints.num-retained";

    // Joins CLI argument fragments into the single command string handed to `bash -c`.
    private static final Joiner SPACE_JOINER = Joiner.on(" ");

    /**
     * Validates that {@code sqlStr} parses as one or more Flink SQL statements.
     *
     * <p>Delegates to {@link #splitSql(String)} purely for its side effect: a malformed
     * script surfaces as a {@link SqlParseException}; the split result is discarded.
     *
     * @param sqlStr SQL script, possibly multi-statement; blank input is accepted
     * @throws SqlParseException if the script cannot be parsed
     */
    public static void verifySql(String sqlStr) throws SqlParseException {
        splitSql(sqlStr);
    }

    /**
     * Parses a (possibly multi-statement) SQL script and renders each parsed statement
     * back to its SQL string form.
     *
     * @param sqlStr SQL script; blank input yields an empty list
     * @return one string per statement, in source order
     * @throws SqlParseException if the script cannot be parsed
     */
    public static List<String> splitSql(String sqlStr) throws SqlParseException {
        if (StringUtils.isBlank(sqlStr)) {
            return Collections.emptyList();
        }

        SqlNodeList statements = createSqlParser(sqlStr).parseStmtList();
        if (statements == null || CollectionUtils.isEmpty(statements.getList())) {
            return Collections.emptyList();
        }

        List<String> rendered = new ArrayList<>(statements.size());
        for (SqlNode statement : statements) {
            rendered.add(statement.toString());
        }
        return rendered;
    }

    /**
     * Collects the names of all unresolved (i.e. non-built-in, typically user-defined)
     * functions referenced anywhere in the given SQL script.
     *
     * <p>Fix: the method was declared {@code throws Exception} although the only checked
     * exception it can raise is {@link SqlParseException}; the throws clause is narrowed
     * accordingly (backward-compatible for all callers).
     *
     * @param sqlStr SQL script; blank input yields an empty list
     * @return distinct function names (iteration order unspecified)
     * @throws SqlParseException if the script cannot be parsed
     */
    public static List<String> getFunctionList(String sqlStr) throws SqlParseException {
        if (StringUtils.isBlank(sqlStr)) {
            return Collections.emptyList();
        }

        SqlParser sqlParser = createSqlParser(sqlStr);
        SqlNodeList sqlNodeList = sqlParser.parseStmtList();
        if (sqlNodeList == null || CollectionUtils.isEmpty(sqlNodeList.getList())) {
            return Collections.emptyList();
        }

        // One shared visitor across all statements so duplicate names collapse.
        FunctionVisitor functionVisitor = new FunctionVisitor();
        sqlNodeList.getList().forEach(e -> e.accept(functionVisitor));
        return functionVisitor.getFunctionList();
    }

    /**
     * Parses a {@code key=value} properties document held in a string.
     *
     * <p>Bug fix: the previous implementation fed UTF-8 bytes to
     * {@link Properties#load(java.io.InputStream)}, which decodes ISO-8859-1 and would
     * mangle any non-ASCII characters. {@link Properties#load(java.io.Reader)} consumes
     * the characters directly, so no charset round-trip is involved.
     *
     * @param str properties text; {@code null} or blank yields an empty {@link Properties}
     * @return the parsed properties, never {@code null}
     * @throws IOException if the properties syntax is malformed
     */
    public static Properties readPropertiesFromString(String str) throws IOException {
        Properties properties = new Properties();
        if (str == null || str.trim().isEmpty()) {
            return properties;
        }
        try (StringReader reader = new StringReader(str)) {
            properties.load(reader);
        }
        return properties;
    }

    /**
     * Builds the baseline Flink configuration applied to every submitted job.
     *
     * @return properties carrying the default jobmanager/taskmanager process memory sizes
     */
    public static Properties getDefaultProperties() {
        Properties defaults = new Properties();

        // Default memory footprint for the job manager and each task manager.
        defaults.setProperty("jobmanager.memory.process.size", "768m");
        defaults.setProperty("taskmanager.memory.process.size", "1024m");
        // Slot count is intentionally left to Flink's own default:
        // defaults.put("taskmanager.numberOfTaskSlots", 1);

        return defaults;
    }

    /**
     * Submits a Flink job through the {@code flink run} CLI.
     *
     * <p>For non-debug jobs, per-job checkpoint/savepoint directories and the retained
     * checkpoint count are configured, and the job may be restored from a savepoint. Extra
     * {@code -D} options, extra classpath jars and the task-client jar/main class are
     * appended, then the CLI output is scanned for the assigned Flink job id.
     *
     * <p>Synchronized because all submissions share the local Flink CLI installation.
     *
     * @param param submission parameters
     * @return parsed job ids plus the raw CLI log lines; {@code yarnApplicationIds} is
     *     currently never populated (no YARN submission path in this code)
     * @throws Exception if the CLI exits non-zero
     */
    public static synchronized FlinkJobResult startJob(StartJobParam param) throws Exception {
        List<String> argList = new ArrayList<>();
        argList.add(FLINK_JAR);
        argList.add("run");

        if (!param.isDebug) {
            // Dedicated per-job checkpoint & savepoint directories.
            argList.add(
                "-D"
                    + CHECK_POINTS_KEY
                    + "=file://"
                    + CHECK_POINTS_BASE_DIR
                    + "/job_"
                    + param.getEngineJobId());
            argList.add(
                "-D"
                    + SAVE_POINTS_KEY
                    + "=file://"
                    + SAVE_POINTS_BASE_DIR
                    + "/job_"
                    + param.getEngineJobId());
            argList.add("-D" + CHECK_POINTS_NUM_RETAINED + "=" + CHECK_POINTS_MAX_NUM);

            // Recover from an existing savepoint when one was supplied.
            if (StringUtils.isNotBlank(param.getSavePoint())) {
                argList.add("-s");
                argList.add("file://" + param.getSavePoint());
            }
        }

        // Caller-supplied -D options; see
        // https://ci.apache.org/projects/flink/flink-docs-release-1.13/docs/deployment/config/
        if (param.getExtProperties() != null) {
            param
                .getExtProperties()
                .forEach(
                    (k, v) -> {
                        String key = StringUtils.trim(Objects.toString(k, null));
                        String value = StringUtils.trim(Objects.toString(v, null));
                        if (StringUtils.isBlank(key) || StringUtils.isBlank(value)) {
                            return;
                        }

                        // The checkpoint/savepoint locations are managed by this class and
                        // may not be overridden by callers.
                        // Bug fix: the second comparison previously used
                        // CHECK_POINTS_BASE_DIR (a filesystem path, never a property key),
                        // so state.savepoints.dir overrides slipped through the filter.
                        if (StringUtils.equalsIgnoreCase(key, CHECK_POINTS_KEY)
                            || StringUtils.equalsIgnoreCase(key, SAVE_POINTS_KEY)) {
                            return;
                        }

                        argList.add("-D" + key + "=" + value);
                    });
        }

        // fix up yarn param
//    argList.add("-t");
//    argList.add(param.getDeploymentTarget());

        // Allow skipping savepoint state that cannot be restored.
        if (param.isAllowNonRestoredState()) {
            argList.add("-n");
        }

        // Add extra jars to every user-code classloader in the cluster.
        if (CollectionUtils.isNotEmpty(param.getExtJarList())) {
            for (String extJar : param.getExtJarList()) {
                argList.add("-C");
                argList.add(extJar);
            }
        }

        // Entry class, task-client jar and its program arguments.
        argList.add("-c");
        argList.add(param.getMainClass());
        argList.add(param.getJar());

        if (StringUtils.isNotBlank(param.getSqlFile())) {
            argList.add("-sqlFilePath");
            argList.add(param.getSqlFile());
        }

        List<String> lines = runCmd("root", argList.toArray(new String[0]));

        // Scan the CLI output for the assigned job id; stop at classloader noise that
        // only shows up after submission has completed.
        LinkedHashSet<String> yarnApplicationIds = new LinkedHashSet<>();
        LinkedHashSet<String> flinkJobIds = new LinkedHashSet<>();
        List<String> logList = new LinkedList<>();
        for (String line : lines) {
            if (StringUtils.contains(line, "Trying to access closed classloader")) {
                break;
            }
            final String flinkJobId = parseFlinkJobId(line);
            if (StringUtils.isNotBlank(flinkJobId)) {
                flinkJobIds.add(flinkJobId);
            }
            logList.add(line);
        }

        FlinkJobResult result = new FlinkJobResult();
        result.setYarnApplicationIds(yarnApplicationIds);
        result.setFlinkJobIds(flinkJobIds);
        result.setEngineJobId(param.getEngineJobId());
        result.setLogList(logList);

        return result;
    }

    // /opt/cloudera/parcels/FLINK/lib/flink/bin/flink stop -m yarn-cluster -yid
    // application_1623805835501_0002 f9a773e1dbbd340bbf45ef13baf7ca6c
    /**
     * Gracefully stops a running Flink job via {@code flink stop}, optionally writing a
     * savepoint into this job's savepoint directory first.
     *
     * @param jobId engine-side job id (used only for the savepoint directory name)
     * @param flinkJobId the Flink job id to stop
     * @param needSavepoint whether to take a savepoint before stopping
     * @throws Exception if the CLI exits non-zero
     */
    public static void stopJob(String jobId, String flinkJobId, boolean needSavepoint)
        throws Exception {
        List<String> args = new ArrayList<>();
        Collections.addAll(args, FLINK_JAR, "stop");
        if (needSavepoint) {
            Collections.addAll(args, "-p", SAVE_POINTS_BASE_DIR + "/job_" + jobId);
        }
        args.add(flinkJobId);
        runCmd("root", args.toArray(new String[0]));
    }

    // /opt/cloudera/parcels/FLINK/lib/flink/bin/flink cancel -m yarn-cluster -yid
    // application_1623743762614_0002 d4a852db6cedc46b2430d181f25084ee
    /**
     * Hard-cancels a Flink job (no savepoint) via {@code flink cancel}.
     *
     * <p>NOTE(review): unlike startJob/stopJob, this command still targets a YARN session
     * ({@code -m yarn-cluster -yid}); confirm this path is reachable in the current
     * standalone deployment.
     *
     * @param jobId the Flink job id to cancel
     * @param yarnApplicationId the YARN application hosting the job
     * @throws Exception if the CLI exits non-zero
     */
    public static void cancelJob(String jobId, String yarnApplicationId) throws Exception {
        runCmd("root", FLINK_JAR, "cancel", "-m", "yarn-cluster", "-yid", yarnApplicationId, jobId);
    }

    // yarn logs -applicationId application_1623805835501_0002 --log_files jobmanager.log
    /**
     * Returns the jobmanager log lines for a YARN application.
     *
     * <p>The actual {@code yarn logs} invocation is disabled (see FIXME below), so this
     * currently always returns an empty list. The marker-scanning logic is kept for when
     * the command is restored.
     *
     * @param yarnApplicationId YARN application id, e.g. application_1623805835501_0002
     * @return the lines between the last "LogContents:" and "End of LogType" markers
     * @throws Exception declared for the disabled runCmd call
     */
    public static List<String> getLogList(String yarnApplicationId) throws Exception {
        List<String> list = Lists.newArrayList();
        // FIXME: 6/22/2022 fetch the flink logs
        //    List<String> list =
        //        runCmd(
        //            "yarn",
        //            YARN_CMD,
        //            "logs",
        //            "-applicationId",
        //            yarnApplicationId,
        //            "--log_files",
        //            "jobmanager.log");
        if (CollectionUtils.isEmpty(list)) {
            return Lists.newArrayList();
        }

        // Keep only the content between the last pair of markers.
        int start = 0;
        int end = 0;
        for (int index = 0; index < list.size(); index++) {
            String line = list.get(index);
            if (StringUtils.startsWith(line, "LogContents:")) {
                start = index + 1;
            }
            if (StringUtils.startsWith(line, "End of LogType")) {
                end = index;
            }
        }
        // Robustness fix: out-of-order markers could leave end < start, which would make
        // subList throw IllegalArgumentException; treat that as "no log content".
        if (end < start) {
            return Lists.newArrayList();
        }
        return list.subList(start, end);
    }

    /**
     * Lists a job's checkpoint directories ("chk-*") together with their creation times by
     * shelling out to {@code ls -lc -d <dir>/* | grep chk | awk '{print $6,$7,$8,$9}'}.
     *
     * <p>Bug fix: the argument list previously expanded to {@code ... | grep | chk | awk}
     * — the grep pattern and the pipe were transposed (compare the correct ordering in
     * {@link #getSavePointList(String, String)}), producing an invalid pipeline.
     *
     * @param engineJobId engine-side job id (forms the job_&lt;id&gt; directory name)
     * @param flinkJobId Flink job id (checkpoints live under a subdirectory of that name)
     * @return parsed checkpoint entries; empty when none exist
     * @throws Exception if the shell command exits non-zero
     */
    public static synchronized List<FlinkCheckpointInfo> getCheckPointList(
        String engineJobId, String flinkJobId) throws Exception {
        List<String> list =
            runCmd(
                "root",
                "ls",
                "-lc",
                "-d",
                "$pwd" + CHECK_POINTS_BASE_DIR + "/job_" + engineJobId + "/" + flinkJobId + "/*",
                "|",
                "grep",
                "chk",
                "|",
                "awk",
                "'{print $6,$7,$8,$9}'");
        if (CollectionUtils.isEmpty(list)) {
            return Collections.emptyList();
        }
        return list.stream()
            .filter(e -> StringUtils.contains(e, CHECK_POINTS_BASE_DIR))
            .map(e -> parseFile(e))
            .filter(Objects::nonNull)
            .collect(Collectors.toList());
    }

    /**
     * Ad-hoc manual smoke test for {@link #parseFlinkJobId(String)} and DateUtil parsing;
     * not part of the production flow.
     */
    public static void main(String[] args) {
        String str = "Job ID: 753e347907777c0800edecf06b02856e";
        // NOTE(review): parseFlinkJobId looks for "Job has been submitted with JobID ",
        // so this input prints an empty string — confirm that is the intended check.
        System.out.println(parseFlinkJobId(str));
        Date createTime = DateUtil.toDate("06" + " " + "28" + " " + "14:19", "MM dd HH:mm");
        System.out.println(createTime);
    }

    /**
     * Lists a job's savepoints together with their creation times.
     *
     * <p>Savepoint directories are named {@code savepoint-<first 6 chars of flink job
     * id>-<suffix>}, so the {@code ls} output is grepped for that prefix.
     *
     * @param engineJobId engine-side job id (forms the job_&lt;id&gt; directory name)
     * @param flinkJobId Flink job id; its first 6 characters form the savepoint prefix
     *     (assumed to always be at least 6 characters — standard Flink ids are 32)
     * @return parsed savepoint entries; empty when none exist
     * @throws Exception if the shell command exits non-zero
     */
    public static synchronized List<FlinkCheckpointInfo> getSavePointList(
        String engineJobId, String flinkJobId) throws Exception {
        // e.g. ls -lc -d $pwd/.../savepoint/job_65/* | grep savepoint-453043- | awk ...
        // Fix: dropped a redundant toString() call on flinkJobId (already a String).
        final String prefix = "savepoint-" + flinkJobId.substring(0, 6) + '-';
        List<String> list =
            runCmd(
                "root",
                "ls",
                "-lc",
                "-d",
                "$pwd" + SAVE_POINTS_BASE_DIR + "/job_" + engineJobId + "/*",
                "|",
                "grep",
                prefix,
                "|",
                "awk",
                "'{print $6,$7,$8,$9}'");

        if (CollectionUtils.isEmpty(list)) {
            return Collections.emptyList();
        }

        return list.stream()
            .filter(e -> StringUtils.contains(e, SAVE_POINTS_BASE_DIR))
            .map(e -> parseFile(e))
            .filter(Objects::nonNull)
            .collect(Collectors.toList());
    }

    //  public static String getJobState(String urlPrefix, String flinkJobId, String
    // yarnApplicationId) {
    //    OkHttpClient httpClient =
    //        new OkHttpClient.Builder()
    //            .readTimeout(10, TimeUnit.SECONDS)
    //            .sslSocketFactory(TLSUtil.createSSLSocketFactory(), new TLSUtil.TrustAllManager())
    //            .hostnameVerifier(new TLSUtil.TrustAllHostnameVerifier())
    //            .build();
    //
    //    String url = String.format("%s/proxy/%s/jobs/%s", urlPrefix, yarnApplicationId, flinkJobId);
    //
    //    final Request request = new Request.Builder().url(url).build();
    //
    //    try (final Response response = httpClient.newCall(request).execute()) {
    //      String body = new String(response.body().bytes());
    //
    //      final JSONObject jsonObject = JSONObject.parseObject(body);
    //
    //      if (jsonObject.containsKey("state")) {
    //        return jsonObject.getString("state");
    //      }
    //    } catch (Exception e) {
    //      log.error("请求url:{}异常", url, e);
    //    } finally {
    //      httpClient.connectionPool().evictAll();
    //    }
    //
    //    return null;
    //  }
    //
    //  public static List<JobState> getJobStateListByApplicationId(
    //      String urlPrefix, String yarnApplicationId) {
    //    OkHttpClient httpClient =
    //        new OkHttpClient.Builder()
    //            .readTimeout(10, TimeUnit.SECONDS)
    //            .sslSocketFactory(TLSUtil.createSSLSocketFactory(), new TLSUtil.TrustAllManager())
    //            .hostnameVerifier(new TLSUtil.TrustAllHostnameVerifier())
    //            .build();
    //
    //    String url = String.format("%s/proxy/%s/jobs", urlPrefix, yarnApplicationId);
    //
    //    final Request request = new Request.Builder().url(url).build();
    //
    //    try (final Response response = httpClient.newCall(request).execute()) {
    //      String body = new String(response.body().bytes());
    //
    //      final JSONObject jsonObject = JSONObject.parseObject(body);
    //
    //      if (jsonObject.containsKey("jobs")) {
    //        final JSONArray jobs = jsonObject.getJSONArray("jobs");
    //        final List<JobState> jobStates = jobs.toJavaList(JobState.class);
    //        return jobStates;
    //      }
    //    } catch (Exception e) {
    //      log.error("请求url:{}异常", url, e);
    //    } finally {
    //      httpClient.connectionPool().evictAll();
    //    }
    //
    //    return null;
    //  }
    //
    //  /**
    //   * The application state - can be one of “NEW”, “NEW_SAVING”, “SUBMITTED”, “ACCEPTED”,
    // “RUNNING”,
    //   * “FINISHED”, “FAILED”, “KILLED”
    //   *
    //   * @param urlPrefix
    //   * @param yarnApplicationId
    //   * @return
    //   */
    //  public static String getApplicationState(String urlPrefix, String yarnApplicationId) {
    //    OkHttpClient httpClient =
    //        new OkHttpClient.Builder()
    //            .readTimeout(10, TimeUnit.SECONDS)
    //            .sslSocketFactory(TLSUtil.createSSLSocketFactory(), new TLSUtil.TrustAllManager())
    //            .hostnameVerifier(new TLSUtil.TrustAllHostnameVerifier())
    //            .build();
    //
    //    String url = String.format("%s/ws/v1/cluster/apps/%s/state", urlPrefix, yarnApplicationId);
    //
    //    final Request request = new Request.Builder().url(url).build();
    //
    //    try (final Response response = httpClient.newCall(request).execute()) {
    //      String body = new String(response.body().bytes());
    //
    //      final JSONObject jsonObject = JSONObject.parseObject(body);
    //
    //      if (jsonObject.containsKey("state")) {
    //        return jsonObject.getString("state");
    //      }
    //    } catch (Exception e) {
    //      log.error("请求url:{}异常", url, e);
    //    } finally {
    //      httpClient.connectionPool().evictAll();
    //    }
    //
    //    return null;
    //  }

    /**
     * Names of the Flink job states that mean the job is no longer running.
     *
     * @return a mutable list of terminal/suspended state names
     */
    public static List<String> getStoppedStateList() {
        JobStatus[] stoppedStatuses = {
            JobStatus.FAILED, JobStatus.CANCELED, JobStatus.FINISHED, JobStatus.SUSPENDED
        };
        List<String> names = new ArrayList<>(stoppedStatuses.length);
        for (JobStatus status : stoppedStatuses) {
            names.add(status.name());
        }
        return names;
    }

    /** Returns the checkpoint base directory as a {@code file://} URI string. */
    public static String getCheckPointsBaseDir() {
        return "file://" + CHECK_POINTS_BASE_DIR;
    }

    /** Returns the savepoint base directory as a {@code file://} URI string. */
    public static String getSavePointsBaseDir() {
        return "file://" + SAVE_POINTS_BASE_DIR;
    }

    // drwxr-xr-x - flink flink 0 2021-07-06 18:48
    // /user/flink/cluster_yarn/savepoints/job_65/savepoint-453043-6e92033b
    // Parses one line of "ls -lc | awk '{print $6,$7,$8,$9}'" output, e.g.
    // "Jul 06 18:48 /user/flink/cluster_yarn/savepoints/job_65/savepoint-453043-6e92033b".
    // Fields: [0]=month (assumed English abbreviation, per DateUtil.getMonthNumByEn —
    // depends on the ls locale, TODO confirm), [1]=day, [2]=HH:mm, [3]=path.
    // Returns null (after logging) when the line does not match this shape.
    private static FlinkCheckpointInfo parseFile(String line) {
        // ls omits the year for recent entries, so assume the current year.
        // NOTE(review): wrong for entries carried over from the previous year (e.g.
        // around New Year) — confirm this is acceptable.
        Calendar calendar = Calendar.getInstance();
        int year = calendar.get(Calendar.YEAR);
        try {
            String[] array = StringUtils.split(line, " ");
            String fileName = array[3];
            String monthNumByEn = DateUtil.getMonthNumByEn(array[0]);
            Date createTime =
                DateUtil.toDate(
                    year + " " + monthNumByEn + " " + array[1] + " " + array[2], "yyyy MM dd HH:mm");
            return new FlinkCheckpointInfo(fileName, createTime);
        } catch (Exception e) {
            // Malformed lines are skipped rather than failing the whole listing.
            log.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Extracts the YARN application id from a submission log line: the trimmed text that
     * follows "Submitted application ". Mirrors the previous StringUtils semantics:
     * returns {@code null} for null input and "" when the marker is absent.
     */
    private static String parseYarnApplicationId(String str) {
        if (str == null) {
            return null;
        }
        String marker = "Submitted application ";
        int pos = str.indexOf(marker);
        return pos < 0 ? "" : str.substring(pos + marker.length()).trim();
    }

    /**
     * Extracts the Flink job id from a submission log line: the trimmed text that follows
     * "Job has been submitted with JobID ". Mirrors the previous StringUtils semantics:
     * returns {@code null} for null input and "" when the marker is absent.
     */
    private static String parseFlinkJobId(String str) {
        if (str == null) {
            return null;
        }
        String marker = "Job has been submitted with JobID ";
        int pos = str.indexOf(marker);
        return pos < 0 ? "" : str.substring(pos + marker.length()).trim();
    }

    /**
     * Runs a shell pipeline via {@code /bin/bash -c} with a 10-minute (600,000 ms) timeout.
     *
     * <p>Fixes: the old javadoc claimed a 60-second timeout while the code sets 600,000 ms;
     * the log messages printed a misleading "/bin/bash " prefix plus an array dump instead
     * of the actual command line, and were not parameterized.
     *
     * <p>All fragments are joined with single spaces into one command string, so shell
     * metacharacters ("|", quoting, "$pwd") are interpreted by bash.
     *
     * @param user intended execution user; currently unused — the command runs as the
     *     JVM's own user (kept for interface compatibility, TODO wire up or remove)
     * @param args command fragments, joined with spaces
     * @return stdout split into lines
     * @throws Exception if the command exits non-zero (message carries stdout/stderr)
     */
    private static List<String> runCmd(String user, String... args) throws Exception {
        String command = SPACE_JOINER.join(args);

        try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
            ProcBuilder builder =
                new ProcBuilder("/bin/bash", "-c").withArgs(command).withTimeoutMillis(600000);
            log.info("flink cmd: {}", builder.getCommandLine());
            builder.withOutputStream(out);
            ProcResult result = builder.run();
            if (result.getExitValue() == 0) {
                log.info("execute command /bin/bash -c {} success", command);
                String outStr = out.toString(StandardCharsets.UTF_8.name());
                String[] lines = outStr.split("\\r?\\n");
                return Lists.newArrayList(lines);
            }
            String msg =
                String.format(
                    "execute command /bin/bash -c %s error, exitCode %s outStr %s errStr %s",
                    command,
                    result.getExitValue(),
                    result.getOutputString(),
                    result.getErrorString());
            log.error(msg);
            throw new Exception(msg);
        }
    }

    /**
     * Creates a Calcite parser wired to Flink's grammar so Flink-specific DDL (e.g.
     * {@code CREATE TABLE ... WITH (...)}) parses, using MySQL-style lexing with backtick
     * quoting and case-preserving identifiers.
     *
     * @param sqlStr the SQL text to parse
     * @return a configured, single-use parser instance
     */
    private static SqlParser createSqlParser(String sqlStr) {
        return SqlParser.create(
            sqlStr,
            SqlParser.config()
                // use flink parser to parse create table sql
                .withParserFactory(FlinkSqlParserImpl.FACTORY)
                .withLex(Lex.MYSQL)
                .withQuoting(Quoting.BACK_TICK)
                .withUnquotedCasing(Casing.UNCHANGED)
                .withQuotedCasing(Casing.UNCHANGED));
    }

    /** Outcome of a job submission: parsed ids plus the raw CLI output. */
    @Data
    public static class FlinkJobResult {

        // engine-side job id, echoed back from StartJobParam
        private Long engineJobId;
        // Flink job ids parsed from "Job has been submitted with JobID ..." lines
        private LinkedHashSet<String> flinkJobIds;
        // NOTE(review): never populated by startJob in the current standalone setup
        private LinkedHashSet<String> yarnApplicationIds;
        // raw CLI output lines captured up to the first closed-classloader warning
        private List<String> logList;
    }

    /** Parameters for {@link #startJob(StartJobParam)}. */
    @Data
    public static class StartJobParam {

        // engine job id
        private Long engineJobId;
        // SQL file to execute
        private String sqlFile;
        // whether to run in debug mode; debug mode skips savepoint/checkpoint configuration
        private boolean isDebug = false;
        // savepoint or checkpoint path to restore from
        private String savePoint;
        // Allow to skip savepoint state that cannot be restored
        private boolean allowNonRestoredState = true;
        // Allows specifying multiple generic configuration options.
        // The available options can be found at
        // https://ci.apache.org/projects/flink/flink-docs-stable/ops/config.html
        private Properties extProperties;
        // Adds a URL to each user code classloader on all nodes in the cluster
        private List<String> extJarList;
        // NOTE(review): currently unused by startJob (the -t flag is commented out there).
        private String deploymentTarget = "yarn-per-job";

        private String mainClass = FLINK_TASK_CLIENT_MAIN_CLASS;
        private String jar = FLINK_TASK_CLIENT_JAR;
    }

    /**
     * Minimal job state pair (id, status).
     *
     * <p>NOTE(review): only referenced by the commented-out REST helpers above — confirm
     * it is still needed.
     */
    @Data
    public static class JobState {

        private String id;

        private String status;
    }

    /** A checkpoint or savepoint directory together with its creation time (from ls -lc). */
    @AllArgsConstructor
    @NoArgsConstructor
    @Data
    public static class FlinkCheckpointInfo {

        // absolute path of the chk-* / savepoint-* directory
        private String checkpointPath;

        // creation time parsed from the ls listing (year assumed to be the current year)
        private Date createTime;
    }

    private static class FunctionVisitor extends SqlShuttle {

        private Set<String> set = new HashSet<>();

        public List<String> getFunctionList() {
            return CollectionUtils.isEmpty(set) ? Collections.emptyList() : new ArrayList<>(set);
        }

        @Override
        public SqlNode visit(SqlCall sqlCall) {
            SqlOperator operator = sqlCall.getOperator();
            if (operator instanceof SqlUnresolvedFunction) {
                final SqlUnresolvedFunction unresolvedFunction = (SqlUnresolvedFunction) operator;
                final String name = unresolvedFunction.getName();
                set.add(name);
            }
            return super.visit(sqlCall);
        }
    }

//  public static void main(String... args) throws Exception {
//    String sql = "insert into mysqlSink select id,name,StringLengthUdf(name) from kafkaTable;";
//    final List<String> functionList = getFunctionList(sql);
//    functionList.forEach(System.out::println);
//  }
}
