package com.fourth.app.dwd.db;

import com.fourth.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author yangyu
 * @create 2022/8/19
 */
// Course review fact job: extracts review_info insert events from the raw
// business change-log topic and writes them to the DWD interaction-review topic.
public class DwdInteractionReview {
    public static void main(String[] args) {

        // 1. Set up the streaming execution environment and its table environment.
        StreamExecutionEnvironment executionEnv = StreamExecutionEnvironment.getExecutionEnvironment();

        executionEnv.setParallelism(1);
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(executionEnv);

        // 2. Checkpointing / state-backend configuration (intentionally disabled).
        /* executionEnv.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        executionEnv.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        executionEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        executionEnv.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        executionEnv.setRestartStrategy(RestartStrategies.failureRateRestart(
                10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)
        ));
        executionEnv.setStateBackend(new HashMapStateBackend());
        executionEnv.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop101:8020/edu/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // 3. Map the raw Kafka business topic onto a Flink SQL source table.
        //    `data` carries the row payload as a string map; `proc_time` is a
        //    processing-time attribute computed via PROCTIME().
        String sourceDdl =
                "create table topic_db("
                        + "`database` string, "
                        + "`table` string, "
                        + "`type` string, "
                        + "`data` map<string, string>, "
                        + "`proc_time` as PROCTIME(), "
                        + "`ts` string "
                        + ")"
                        + MyKafkaUtil.getKafkaDDL("topic_db", "edu_review");
        streamTableEnv.executeSql(sourceDdl);

        // 4. Keep only insert events of the review_info table and project the
        //    payload fields out of the `data` map.
        String reviewQuery =
                "select "
                        + "data['id'] id, "
                        + "data['user_id'] user_id, "
                        + "data['course_id'] course_id, "
                        + "data['review_txt'] review_txt, "
                        + "data['review_stars'] review_stars, "
                        + "data['create_time'] create_time, "
                        + "data['deleted'] deleted, "
                        //+ "proc_time, "
                        + "ts "
                        + "from topic_db "
                        + "where `table` = 'review_info' "
                        + "and `type` = 'insert' ";
        Table reviewTable = streamTableEnv.sqlQuery(reviewQuery);
        streamTableEnv.createTemporaryView("review_info", reviewTable);

        // 5. Declare the Kafka sink table dwd_interaction_review.
        String sinkDdl =
                "create table dwd_interaction_review( "
                        + "id string, "
                        + "user_id string, "
                        + "course_id string, "
                        + "review_txt string, "
                        + "review_stars string, "
                        + "create_time string, "
                        + "deleted string, "
                        + "ts string "
                        + ")"
                        + MyKafkaUtil.getKafkaSinkDDL("dwd_interaction_review");
        streamTableEnv.executeSql(sinkDdl);

        // 6. Submit the insert job: stream the filtered rows into the sink topic.
        streamTableEnv.executeSql("insert into dwd_interaction_review select * from review_info");

    }
}
