package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Course domain — course user review fact table (DWD layer).
 *
 * <p>Reads change-log records from the Kafka {@code topic_db} topic, keeps only
 * non-deleted rows of the business table {@code review_info}, and upserts the
 * result into the Kafka topic {@code dwd_course_user_review_info}.
 *
 * @Author zhangsan
 * @Date 2022/10/19 8:53
 */
public class DwdCourseUserReview {
    public static void main(String[] args) throws Exception {

        // 1. Execution environment: streaming env plus Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Checkpointing configuration (intentionally disabled here;
        //    re-enable for a production deployment).
        /*env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(6000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");*/

        // 3. Register the Kafka source table over topic_db, using a
        //    consumer group dedicated to this job.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_course_user_review_group"));

        // 4. Project the review columns out of the change-log stream,
        //    keeping only rows from review_info that are not soft-deleted.
        String reviewQuery = "select\n " +
                "   `data`['id'] id,\n" +
                "   `data`['user_id'] user_id,\n" +
                "   `data`['course_id'] course_id,\n" +
                "   `data`['review_stars'] review_stars,\n" +
                "   `data`['create_time'] ts\n" +
                "from topic_db\n" +
                "where `table` = 'review_info'\n" +
                "and `data`['deleted'] = '0'";
        Table reviewTable = tableEnv.sqlQuery(reviewQuery);
        tableEnv.createTemporaryView("review_temp_table", reviewTable);

        // 5. Declare the upsert-kafka sink (keyed by review id) and write
        //    the filtered stream into it.
        String sinkDdl = "CREATE TABLE dwd_course_user_review_info (\n" +
                "  id string,\n" +
                "  user_id string,\n" +
                "  course_id string,\n" +
                "  review_stars  string,\n" +
                "  ts string,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") " + MyKafkaUtil.getUpsertKafkaDDL("dwd_course_user_review_info");
        tableEnv.executeSql(sinkDdl);

        // Debug aid: print the intermediate view instead of writing to Kafka.
        //tableEnv.executeSql("select * from review_temp_table").print();
        tableEnv.executeSql("insert into dwd_course_user_review_info select * from review_temp_table");
    }
}
