package com.atliuzu.app.dwd.db;

import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

//数据流：web/app -> Nginx -> 业务服务器 -> Mysql(binlog) -> Maxwell -> Kafka(ODS) -> FlinkApp -> Kafka(DWD)
//程  序：Mock -> Mysql(binlog) -> Maxwell -> Kafka(ZK) -> DwdInteractionComment -> Kafka(ZK)
public class DwdInteractionComment {

    /**
     * DWD-layer job: reads Maxwell change-log records from the ODS Kafka topic,
     * filters the insert events of the {@code review_info} business table, and
     * streams the flattened rows into a dedicated Kafka DWD topic.
     */
    public static void main(String[] args) throws Exception {

        // 1. Streaming environment bridged to the Table API, single parallelism.
        StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        execEnv.setParallelism(1);
        StreamTableEnvironment tblEnv = StreamTableEnvironment.create(execEnv);

        // 2. Checkpointing / state backend — deliberately disabled for local runs;
        //    the block below records the intended production configuration.
//        execEnv.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        execEnv.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        execEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        execEnv.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        execEnv.setRestartStrategy(RestartStrategies.failureRateRestart(
//                3, Time.days(1), Time.minutes(1)
//        ));
//        execEnv.setStateBackend(new HashMapStateBackend());
//        execEnv.getCheckpointConfig().setCheckpointStorage(
//                "hdfs://hadoop102:8020/ck"
//        );
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // 3. Map the ODS Kafka topic (Maxwell change-log JSON) onto a Flink SQL
        //    source table; the connector options come from MyKafkaUtil.
        tblEnv.executeSql("create table topic_db( " +
                "`database` string, " +
                "`table` string, " +
                "`data` map<string, string>, " +
                "`type` string, " +
                "`ts` string " +
                ")" + MyKafkaUtil.getKafkaDDL("topic_db", "comment"));

        // Debug aid: tblEnv.executeSql("select * from topic_db").print();

        // 4. Keep only insert events of the review_info table and flatten the
        //    `data` map into individual columns.
        Table reviewInfoTable = tblEnv.sqlQuery("select " +
                "data['id'] id, " +
                "data['user_id'] user_id, " +
                "data['course_id'] course_id, " +
                "data['review_txt'] review_txt, " +
                "data['review_stars'] review_stars, " +
                "data['create_time'] create_time, " +
                "data['deleted'] deleted, " +
                "ts " +
                "from topic_db " +
                "where `table` = 'review_info' " +
                "and `type` = 'insert' ");
        tblEnv.createTemporaryView("review_info", reviewInfoTable);
        // Debug aid: tblEnv.executeSql("select * from review_info ").print();

        // 5. Declare the DWD sink table backed by a Kafka connector. Column order
        //    must match the projection above since the insert uses `select *`.
        tblEnv.executeSql("create table dwd_interaction_comment( " +
                "id string, " +
                "user_id string, " +
                "course_id string, " +
                "review_txt string, " +
                "review_stars string, " +
                "create_time string, " +
                "deleted string, " +
                "ts string " +
                ")" + MyKafkaUtil.getInsertKafkaDDL("dwd_interaction_comment"));

        // 6. Stream the filtered review rows into the DWD Kafka topic.
        tblEnv.executeSql("insert into dwd_interaction_comment select * from review_info");

        // Verification print — reads back the sink table; blocks here, which also
        // keeps a local MiniCluster run alive while the insert job is running.
        tblEnv.executeSql("select * from dwd_interaction_comment").print();
    }

}