package com.changan.examples;

import com.changan.model.CancelArgs;
import com.changan.model.FlinkRunParam;
import com.changan.model.StopArgs;
import com.changan.utils.FlinkRestApiUtil;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

/**
 * Stage-1 demo of a Flink job stop/restore cycle driven over the Flink REST API:
 * submit a job, trigger a savepoint when a (simulated) schema-change event arrives,
 * terminate the job, simulate the downstream schema change, then resubmit the job
 * from the savepoint with updated program arguments.
 *
 * <p>NOTE(review): host/port/savepoint-path constants are intentionally blank here
 * and must be filled in before running.
 */
public class AppStage1 {

    private static final String FLINK_HOST = "";
    private static final String FLINK_PORT = "";
    private static final String FLINK_SAVEPOINT_PATH = "";
    private static final String START_CLASS = "me.roohom.Application";
    // Program arguments for the initial submission (Flink CDC job: MySQL -> Kafka -> StarRocks).
    private static final String START_ARGS = "-jn flink-starrocks-cdc -sn mysql -st order_info,order_details,user_info,user_details -spk id;id;id;id -tn starrocks -tt order_info,oder_details,user_info,user_details -tpk id;id;id;id -kf kafka -kt star_order_info_topic,star_order_details_topic,star_user_info_topic,star_user_details_topic -ka cdh001:9092,cdh002:9092,cdh003:9092 -kp id;id;id;id -kts _topic -kgp flink-cdc-starrocks -kssm earliest-offset -sa jdbc:mysql://192.168.2.43:13306/star -un root -pw zd123456 -hs 192.168.2.43 -pt 13306 -db star -ta jdbc-url=jdbc:mysql://192.168.1.94:9030|load-url=192.168.1.94:8030 -tu roohom -tpw 123456 -tdb star -sum initial";

    // Set once the savepoint operation is confirmed COMPLETED; read when restarting the job.
    // Stays "" if the savepoint never completes (a warning is printed in that case).
    private static String SAVEPOINT_PATH = "";

    // ObjectMapper is expensive to create and thread-safe once configured; cache one instance.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    public static void main(String[] args) throws IOException, InterruptedException {
        mockJobRestore();
    }

    /**
     * Runs the full mock stop/restore scenario against the configured Flink cluster.
     *
     * @throws IOException          if a REST call fails or a response cannot be parsed as JSON
     * @throws InterruptedException if one of the demo sleeps is interrupted
     */
    public static void mockJobRestore() throws IOException, InterruptedException {
        String flinkJarsInfo = FlinkRestApiUtil.getFlinkJarsInfo(FLINK_HOST, FLINK_PORT);
        System.out.println("FLINK WEB上的jar信息:" + flinkJarsInfo);
        Thread.sleep(1000);

        String jarId = findLastJarId(flinkJarsInfo);
        System.out.println(jarId);
        String params = START_ARGS;
        System.out.println(params);
        Thread.sleep(1000);

        // 1. Submit the job.
        FlinkRunParam flinkRunParam = new FlinkRunParam();
        flinkRunParam.setEntryClass(START_CLASS);
        // Encode double quotes as '|' before sending; String.replace is a literal
        // substitution (replaceAll would interpret the argument as a regex).
        flinkRunParam.setProgramArgs(params.replace("\"", "|"));

        // Response shape: {"jobid":"05b1a1b22f7139f42fa6c60370b47a4e"}
        String startResponse = FlinkRestApiUtil.startUpFlinkJob(FLINK_HOST, FLINK_PORT, jarId, flinkRunParam);
        System.out.println("提交启动任务接收到的响应: -> " + startResponse);

        // 2. Let the job run, then trigger a savepoint when the schema-change signal arrives.
        Thread.sleep(100000);
        System.out.println("SCHEMA: ->" + "接收到SCHEMA变更消息");
        String jobId = OBJECT_MAPPER.readTree(startResponse).get("jobid").asText();

        System.out.println("开始停止任务并且生成savepoint");
        CancelArgs cancelArgs = new CancelArgs();
        cancelArgs.setCancelJob(false);
        // TODO(review): hard-coded target directory; consider wiring FLINK_SAVEPOINT_PATH here.
        cancelArgs.setTargetDirectory("hdfs://nameservice1/tmp/yarnjobs/");
        String saResponse = FlinkRestApiUtil.triggerSavepoint(
                FLINK_HOST,
                FLINK_PORT,
                jobId,
                cancelArgs
        );
        System.out.println(saResponse);
        // Response shape: {"request-id":"d235b264d99618ca3714c812c47525a8"}
        String requestId = OBJECT_MAPPER.readTree(saResponse).get("request-id").asText();

        awaitSavepointCompletion(jobId, requestId);

        Integer terminateStatus = FlinkRestApiUtil.terminateFlinkJobByJobId(
                FLINK_HOST,
                FLINK_PORT,
                jobId
        );
        System.out.println(terminateStatus);

        // 3. Simulate changing the downstream table schema.
        System.out.println("DOWNSTREAM: -> " + "NOW WE ARE CHANGING SCHEMA. PLEASE HOLD ON A SEC.");
        System.out.println("现在需要去kudu修改表结构...(MYSQL)");
        Thread.sleep(30000);
        System.out.println("DOWNSTREAM: -> " + "SCHEMA CHANGING FINISHED. NOW YOU CAN DO WHAT YOU NEED TO DO.");
        Thread.sleep(1000);

        // 4. On the "change finished" signal, restart the job from the savepoint with new args
        //    (the argument list below adds the user_additional table/topic to the pipeline).
        System.out.println("接收到修改完成信号，修改启动参数并重启任务");
        params = "-jn flink-starrocks-cdc -sn mysql -st order_info,order_details,user_info,user_details,user_additional -spk id;id;id;id;id -tn starrocks -tt order_info,oder_details,user_info,user_details,user_additional -tpk id;id;id;id -kf kafka -kt star_order_info_topic,star_order_details_topic,star_user_info_topic,star_user_details_topic,user_additional_topic -ka cdh001:9092,cdh002:9092,cdh003:9092 -kp id;id;id;id;id -kts _topic -kgp flink-cdc-starrocks -kssm earliest-offset -sa jdbc:mysql://192.168.2.43:13306/star -un root -pw zd123456 -hs 192.168.2.43 -pt 13306 -db star -ta jdbc-url=jdbc:mysql://192.168.1.94:9030|load-url=192.168.1.94:8030 -tu roohom -tpw 123456 -tdb star -sum initial";

        FlinkRunParam flinkRunArg = new FlinkRunParam();
        flinkRunArg.setEntryClass(START_CLASS);
        flinkRunArg.setProgramArgs(params.replace("\"", "|"));
        flinkRunArg.setSavepointPath(SAVEPOINT_PATH);

        String restartWithSaResponse = FlinkRestApiUtil.startUpFlinkJob(
                FLINK_HOST,
                FLINK_PORT,
                jarId,
                flinkRunArg
        );
        System.out.println(restartWithSaResponse);
    }

    /**
     * Extracts a jar id from the /jars listing. Iterates every entry and keeps the
     * last one, matching the original selection behavior (the REST response lists
     * uploaded jars under "files").
     *
     * @param flinkJarsInfo raw JSON returned by the Flink /jars endpoint
     * @return the id of the last listed jar, or "" if the listing is empty
     * @throws IOException if the response is not valid JSON
     */
    private static String findLastJarId(String flinkJarsInfo) throws IOException {
        String jarId = "";
        JsonNode filesNode = OBJECT_MAPPER.readTree(flinkJarsInfo).get("files");
        for (JsonNode node : filesNode) {
            jarId = node.get("id").asText();
        }
        return jarId;
    }

    /**
     * Polls the savepoint operation status up to 5 times, one second apart, and
     * records the savepoint location in {@code SAVEPOINT_PATH} once the operation
     * reports COMPLETED. Prints a warning if it never completes — previously this
     * failure was silent and the job was restarted without a savepoint.
     *
     * @param jobId     the Flink job id the savepoint was triggered for
     * @param requestId the trigger request id returned by the savepoint endpoint
     * @throws IOException          if a status call fails or returns invalid JSON
     * @throws InterruptedException if the poll sleep is interrupted
     */
    private static void awaitSavepointCompletion(String jobId, String requestId)
            throws IOException, InterruptedException {
        for (int i = 0; i < 5; i++) {
            System.out.println("这是第" + i + "次查询savapoint是否成功.");
            // Expected shape:
            // {"status":{"id":"COMPLETED"},"operation":{"location":"hdfs://.../savepoint-05b1a1-cb29b9f330c7"}}
            String statusOfSaOp = FlinkRestApiUtil.getStatusOfSaOp(FLINK_HOST, FLINK_PORT, jobId, requestId);
            JsonNode statusNode = OBJECT_MAPPER.readTree(statusOfSaOp);
            if ("COMPLETED".equals(statusNode.get("status").get("id").asText())) {
                System.out.println("OK, a savepoint has made: -> " + statusOfSaOp);
                SAVEPOINT_PATH = statusNode.get("operation").get("location").asText();
                return;
            }
            Thread.sleep(1000);
        }
        System.out.println("WARN: savepoint not COMPLETED after 5 attempts; SAVEPOINT_PATH left empty.");
    }
}
