package com.atguigu.app.dwd.db;

import com.atguigu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job: extracts course review records from the raw CDC stream.
 *
 * <p>Reads the Maxwell/Canal-style change envelope from Kafka topic {@code topic_db},
 * keeps only {@code insert} events on the {@code review_info} table, flattens the
 * {@code data} map into columns, and writes the result to Kafka topic
 * {@code dwd_interaction_review}.
 */
public class DwdInteractionReview {

    public static void main(String[] args) throws Exception {
        // TODO 1: set up the streaming environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2: checkpointing / state backend — intentionally disabled for local
        // development; enable (and restore the matching imports) before production.
        // env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        // env.getCheckpointConfig().setCheckpointTimeout(3 * 60 * 1000L);
        // env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        // env.setStateBackend(new HashMapStateBackend());
        // env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        // System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 3: declare the raw CDC source table over Kafka topic_db.
        // `data`/`old` carry the row image as string maps; `pt` is a processing-time
        // attribute generated here, not read from the message.
        String topicName = "topic_db";
        String groupId = "dwd_interaction_review";
        tableEnv.executeSql("create table topic_db(\n" +
                "  `database` String,\n" +
                "  `table` String,\n" +
                "  `type` String,\n" +
                "  `ts` String,\n" +
                "  `xid` String,\n" +
                "  `data`  MAP<STRING,STRING>,\n" +
                "  `old` MAP<STRING,STRING>,\n" +
                "  `pt`  as proctime()\n" +
                "  )" + KafkaUtil.getKafkaDDL(topicName, groupId));

        // TODO 4: keep only newly inserted review_info rows and flatten the
        // `data` map into named columns.
        Table reviewStream = tableEnv.sqlQuery("select\n" +
                "`data`['id']  id,   \n" +
                "`data`['user_id'] user_id,  \n" +
                "`data`['course_id'] course_id,  \n" +
                "`data`['review_txt']  review_txt,   \n" +
                "`data`['review_stars']  review_stars,   \n" +
                "`data`['create_time'] create_time,  \n" +
                "`data`['deleted'] deleted, \n" +
                "`ts` ,\n" +
                "`pt` \n" +
                "from topic_db \n" +
                "where `table` = 'review_info'\n" +
                "and `type` = 'insert'");

        tableEnv.createTemporaryView("review_info", reviewStream);

        // TODO 5: declare the DWD sink table and start the streaming insert.
        // Column order must match the projection above because the insert uses `select *`.
        String targetTopicName = "dwd_interaction_review";
        tableEnv.executeSql("create table kafka_review_info(\n" +
                "  `id`  String,\n" +
                "  `user_id` String,\n" +
                "  `course_id` String,\n" +
                "  `review_txt`  String,\n" +
                "  `review_stars`  STRING,\n" +
                "  `create_time` String,\n" +
                "  `deleted` String,\n" +
                "  `ts` String,\n" +
                "  `pt` TIMESTAMP_LTZ(3)\n" +
                ")" + KafkaUtil.getKafkaSinkDDL(targetTopicName));

        tableEnv.executeSql("insert into kafka_review_info select * from review_info");
        // NOTE(review): removed the leftover debug statement
        //   tableEnv.sqlQuery("select * from kafka_review_info").execute().print();
        // kafka_review_info is declared with sink-oriented connector options
        // (getKafkaSinkDDL), so querying it opened a second — presumably
        // misconfigured — Kafka consumer on the output topic. If console output is
        // needed for debugging, print `review_info` instead:
        //   reviewStream.execute().print();
    }
}
