package com.atguigu.gmall.realtime.common.base;

import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.common.util.SQLUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public abstract class BaseSQLApp {

    /**
     * Template method: prepares the Flink streaming + table environment, enables
     * checkpointing, then delegates the actual job logic to {@link #handle}.
     *
     * @param port        REST/Web-UI port for this job (applied via {@link RestOptions#PORT})
     * @param parallelism default operator parallelism for the job
     * @param ck          checkpoint directory name; NOTE(review): currently unused because
     *                    the HDFS checkpoint-storage line below is commented out — confirm
     *                    whether it should be re-enabled in production
     */
    public void start(int port, int parallelism, String ck) {
        // TODO 1. Basic environment preparation
        Configuration conf = new Configuration();
        conf.set(RestOptions.PORT, port);
        // FIX: conf was previously never passed to getExecutionEnvironment(), so the
        // configured REST port was silently ignored. Pass it so the Web-UI port takes effect.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(parallelism);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Checkpoint configuration (every 5s, exactly-once semantics)
//        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
//        checkpointConfig.setCheckpointTimeout(60000L);
//        checkpointConfig.setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        checkpointConfig.setMinPauseBetweenCheckpoints(2000L);
//        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
//        env.setStateBackend(new HashMapStateBackend()); // state storage location differs from the default
//        checkpointConfig.setCheckpointStorage("hdfs://hadoop102:8020/ck/" + ck);
//        System.setProperty("HADOOP_USER_NAME", "atguigu");
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        handle(env, tableEnv);
    }

    /**
     * Implemented by concrete jobs: contains the job-specific SQL / stream logic.
     *
     * @param env      the configured streaming execution environment
     * @param tableEnv the table environment bound to {@code env}
     */
    public abstract void handle(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv);

    /**
     * Registers the ODS-layer Kafka source table {@code topic_db} (Maxwell CDC envelope:
     * database/table/type/data/old/ts). Adds a processing-time column {@code pt} and an
     * event-time column {@code et} derived from the epoch-seconds field {@code ts}
     * (precision 0 in TO_TIMESTAMP_LTZ), with a zero-delay watermark on {@code et}.
     *
     * @param tableEnv table environment to register the source in
     * @param groupId  Kafka consumer group id for this job
     */
    protected static void readOdsDb(StreamTableEnvironment tableEnv, String groupId) {
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "  `database` string,\n" +
                "  `table` string,\n" +
                "  `type` string,\n" +
                "  `data` map<String,String>,\n" +
                "  `old` map<String,String>,\n" +
                "  `ts` bigint,\n" +
                "  `pt` as proctime(),\n" +
                "  `et` as TO_TIMESTAMP_LTZ(ts, 0),\n" +
                "  WATERMARK FOR et AS et\n" +
                ")" + SQLUtil.getKafkaDDL(Constant.TOPIC_DB , groupId));
    }

    /**
     * Registers the HBase-backed dictionary dimension table {@code base_dic}
     * (rowkey {@code dic_code} → column family {@code info} with {@code dic_name}),
     * typically used for lookup joins.
     *
     * @param tableEnv table environment to register the dimension table in
     */
    protected static void readBaseDic(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE base_dic (\n" +
                " dic_code string,\n" +
                " info ROW<dic_name string>,\n" +
                " PRIMARY KEY (dic_code) NOT ENFORCED\n" +
                ")" + SQLUtil.getHbaseDDL("dim_base_dic"));
    }
}
