
package com.yangzb.flink.sql.sample;

import com.yangzb.flink.sql.sample.constant.SystemConstant;
import com.yangzb.flink.sql.sample.execute.ExecuteSql;
import com.yangzb.flink.sql.sample.model.SqlCommandCall;
import com.yangzb.flink.sql.sample.sql.SqlFileParser;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

/**
 * @author: yangzb
 * @date 2023/2/21 10:43 AM
 **/
public class SqlSubmit {

    /** Path of the local SQL script to parse and submit. */
    private final String sqlFilePath;
    /** Value used for the {@code pipeline.name} configuration of the submitted job. */
    private final String jobName;
    private StreamTableEnvironment tEnv;

    public SqlSubmit(String sqlFilePath, String jobName) {
        this.sqlFilePath = sqlFilePath;
        this.jobName = jobName;
    }

    /**
     * Builds the streaming environment, reads the SQL file, executes its statements as one
     * {@link StatementSet} job and prints the resulting JobID.
     *
     * @throws Exception if the SQL file cannot be read, a statement fails, or no JobID is
     *     returned by the cluster (wrapped in a {@link RuntimeException}).
     */
    public void run() throws Exception {
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Restart up to 10 times, waiting 30 s between attempts.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(10, Time.milliseconds(30000)));
        env.disableOperatorChaining();
        env.setParallelism(1);

        // Trigger an exactly-once checkpoint every 5 minutes. The mode passed here is
        // authoritative; no need to set it again on the CheckpointConfig.
        env.enableCheckpointing(1000 * 60 * 5, CheckpointingMode.EXACTLY_ONCE);
        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        checkpointConfig.enableUnalignedCheckpoints();
        // Require at least 500 ms between the end of one checkpoint and the start of the next.
        checkpointConfig.setMinPauseBetweenCheckpoints(500);
        // A checkpoint that takes longer than 10 minutes is discarded.
        checkpointConfig.setCheckpointTimeout(1000 * 60 * 10);
        // Only one checkpoint may be in flight at a time.
        checkpointConfig.setMaxConcurrentCheckpoints(1);
        // Tolerate up to 5 consecutive checkpoint failures before failing the job.
        checkpointConfig.setTolerableCheckpointFailureNumber(5);
        // Keep externalized checkpoints after the job is cancelled so it can be restored.
        checkpointConfig.enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // NOTE(review): HDFS checkpoint path is hard-coded — consider making it configurable.
        env.setStateBackend(new RocksDBStateBackend("hdfs://hdp6.tydic.com:8020/flink/cp112", true));
//        env.setStateBackend(new FsStateBackend("hdfs://hdp6.tydic.com:8020/flink/cp112",true));

        this.tEnv = StreamTableEnvironment.create(env, settings);
        List<String> sqls = Files.readAllLines(Paths.get(sqlFilePath));
//        String sql = HttpUtil.get("http://192.168.10.18:8081/static/q2.sql");
//        String[] split = sql.split("\n");
//        List<String> sqls = Arrays.asList(split);

        List<SqlCommandCall> sqlCommandCallList = SqlFileParser.fileToSql(sqls);
        StatementSet statementSet = tEnv.createStatementSet();
        ExecuteSql.exeSql(sqlCommandCallList, tEnv, statementSet);
        Configuration configuration = tEnv.getConfig().getConfiguration();
        configuration.setString("pipeline.name", jobName);
        configuration.setBoolean("rest.flamegraph.enabled", true);

        TableResult tableResult = statementSet.execute();
        // getJobClient() returns Optional — never call get() unchecked: an absent client
        // would throw NoSuchElementException before any null-check could run.
        JobID jobID = tableResult == null
                ? null
                : tableResult.getJobClient().map(client -> client.getJobID()).orElse(null);
        if (jobID == null) {
            throw new RuntimeException("任务运行失败 没有获取到JobID");
        }
        System.out.println(SystemConstant.QUERY_JOBID_KEY_WORD + jobID);
    }
}
