package com.atguigu.app.dwd.db;

import com.atguigu.common.Constant;
import com.atguigu.util.HBaseUtil;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

//Data flow: web/app -> Nginx -> business server (MySQL) -> Maxwell -> Kafka(ODS) -> FlinkApp -> Kafka(DWD)
//Programs:  Mock -> maxwell.sh -> Kafka(ZK) -> Dwd02_InteractionCommentInfo(lookUpJoin HBase|HDFS ZK) -> Kafka(ZK)
/**
 * DWD job: reads change-log records of the {@code comment_info} business table from the
 * Kafka ODS topic (produced by Maxwell), enriches the {@code appraise} code with its
 * dictionary name via a lookup join against the HBase {@code base_dic} dimension table,
 * and writes the result to the DWD Kafka topic.
 */
public class Dwd02_InteractionCommentInfo {

    public static void main(String[] args) throws Exception {

        //TODO 1. Create the execution environment (FlinkSQL)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);  // In production, keep parallelism equal to the Kafka topic's partition count
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //1.1 Enable checkpointing (left disabled for local testing; enable in production)
        //env.enableCheckpointing(60000 * 5);
        //env.setStateBackend(new HashMapStateBackend());

        //1.2 Checkpoint-related settings
        //CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        //checkpointConfig.setCheckpointTimeout(10000L);
        //checkpointConfig.setCheckpointStorage("hdfs://hadoop102:8020/flink-ck");
        //Retain the last checkpoint when the job is cancelled
        //checkpointConfig.setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //checkpointConfig.setMinPauseBetweenCheckpoints(5000L);
        //Restart strategy
        //env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 1000L));

        //TODO 2. Read the Kafka topic_db topic with FlinkSQL and register it as a dynamic table
        tableEnv.executeSql(KafkaUtil.getTopicDbDDL("dwd02_comment_info_230524"));

        //TODO 3. Filter out comment-table rows (inserts and updates only)
        Table commentInfoTable = tableEnv.sqlQuery("" +
                "select\n" +
                "    `data`['id'] id,\n" +
                "    `data`['user_id'] user_id,\n" +
                "    `data`['nick_name'] nick_name,\n" +
                "    `data`['sku_id'] sku_id,\n" +
                "    `data`['spu_id'] spu_id,\n" +
                "    `data`['order_id'] order_id,\n" +
                "    `data`['appraise'] appraise,\n" +
                "    `data`['comment_txt'] comment_txt,\n" +
                "    `data`['create_time'] create_time,\n" +
                "    `pt`\n" +
                "from ods_topic_db\n" +
                "where `database` = 'gmall-230524-flink'\n" +
                "and `table` = 'comment_info'\n" +
                "and (`type` = 'insert' or `type` = 'update')");
        tableEnv.createTemporaryView("comment_info", commentInfoTable);

        //TODO 4. Register the HBase base_dic dimension table
        tableEnv.executeSql(HBaseUtil.getBaseDicDDL());

        //TODO 5. Lookup-join the comment table with the base_dic dimension table
        // FOR SYSTEM_TIME AS OF ci.pt performs a processing-time lookup join: each comment
        // row probes HBase at its processing time instead of keeping the dim table in state.
        Table resultTable = tableEnv.sqlQuery("" +
                "select\n" +
                "    ci.id,\n" +
                "    ci.user_id,\n" +
                "    ci.nick_name,\n" +
                "    ci.sku_id,\n" +
                "    ci.spu_id,\n" +
                "    ci.order_id,\n" +
                "    ci.appraise,\n" +
                "    dim_base_dic.info.dic_name appraise_name,\n" +
                "    ci.comment_txt,\n" +
                "    ci.create_time\n" +
                "from comment_info ci\n" +
                "join dim_base_dic FOR SYSTEM_TIME AS OF ci.pt\n" +
                "on ci.appraise = dim_base_dic.rowkey");
        tableEnv.createTemporaryView("result_table", resultTable);

        //TODO 6. Write the joined result to the DWD Kafka topic
        tableEnv.executeSql("" +
                "create table dwd_comment_info(\n" +
                "    `id` string,\n" +
                "    `user_id` string,\n" +
                "    `nick_name` string,\n" +
                "    `sku_id` string,\n" +
                "    `spu_id` string,\n" +
                "    `order_id` string,\n" +
                "    `appraise` string,\n" +
                "    `appraise_name` string,\n" +
                "    `comment_txt` string,\n" +
                "    `create_time` string\n" +
                ")" + KafkaUtil.getKafkaSinkDDL(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO));

        //TODO 7. Submit and wait for the job.
        // executeSql("insert ...") already submits the streaming job; await() blocks the
        // client until the job terminates. Do NOT call env.execute() here — this job has
        // no DataStream operators, so it would throw
        // "IllegalStateException: No operators defined in streaming topology".
        tableEnv.executeSql("insert into dwd_comment_info select * from result_table")
                .await();
    }

}
