package com.wsjj.gmall.base;

import com.wsjj.gmall.constant.Constant;
import com.wsjj.gmall.util.SQLUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;




/**
 * Template base class for Flink SQL jobs: builds the stream and table execution
 * environments, then delegates the job-specific logic to {@link #handle}.
 */
public abstract class BaseSQLApp {

    /**
     * Prepares the Flink environments and runs the subclass business logic.
     *
     * @param port        port for the embedded Flink REST / web UI
     * @param parallelism job-wide default parallelism
     * @param ck          checkpoint identifier; NOTE(review): currently unused —
     *                    the checkpoint configuration below is commented out and
     *                    the storage path inside it is hard-coded. Confirm intent.
     */
    public void start(int port, int parallelism, String ck) {
        // TODO 1. Environment preparation
        Configuration flinkConf = new Configuration();
        flinkConf.set(RestOptions.PORT, port);
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment(flinkConf);
        env.setParallelism(parallelism);
        StreamTableEnvironment streamTable = StreamTableEnvironment.create(env);

        // TODO 2. Checkpoint settings — intentionally left disabled; uncomment to enable.
        // Enable checkpointing (exactly-once):
        // env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // Checkpoint timeout:
        // env.getCheckpointConfig().setCheckpointTimeout(6000L);
        // Retain externalized checkpoints after job cancellation:
        // env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // Minimum pause between two checkpoints:
        // env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // Restart strategy:
        // env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        // State backend and checkpoint storage:
        // env.setStateBackend(new HashMapStateBackend());
        // env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020");
        // Hadoop user for HDFS access:
        // System.setProperty("HADOOP_USER_NAME","atguigu");

        // TODO 3. Business logic supplied by the concrete subclass
        handle(streamTable);
    }

    /** Job-specific business logic, implemented by each concrete application. */
    public abstract void handle(StreamTableEnvironment streamTable);

    /**
     * Registers the ODS topic_db Kafka source as table {@code KafkaTable}
     * (Maxwell-style CDC envelope: database/table/type/ts/data/old).
     * NOTE(review): the processing-time column is spelled {@code proc_tim}
     * (not {@code proc_time}); downstream SQL must use that exact spelling —
     * confirm whether this was intended.
     */
    public void readOdsTopicdb(StreamTableEnvironment streamTable) {
        String ddl = "CREATE TABLE KafkaTable (\n"
                + "  `database` string,\n"
                + "  `table` string,\n"
                + "  `type` string,\n"
                + "  `ts` bigint,\n"
                + "  `data` map<string,string>,\n"
                + "  `old` map<string,string>,\n"
                + "  proc_tim as proctime()\n"
                + "\n"
                + ") " + SQLUtil.getKafkaDDL(Constant.TOPIC_DB, "testGroup");
        streamTable.executeSql(ddl);
    }
}
