package com.atguigu.gmall.realtime.dwd.db.app;


import com.atguigu.gmall.realtime.common.constant.Constant;
import com.atguigu.gmall.realtime.common.utill.SQLUtil;
import com.sun.org.apache.bcel.internal.Const;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer streaming job: interaction comment facts.
 *
 * <p>Reads CDC change-log records from the Kafka {@code topic_db} topic,
 * filters out {@code comment_info} INSERT events, enriches the appraise
 * code with its human-readable name via a lookup join against the HBase
 * dimension table {@code dim_base_dic}, and writes the joined result to a
 * DWD Kafka topic using the upsert-kafka connector.
 *
 * <p>NOTE(review): the import of {@code com.sun.org.apache.bcel.internal.Const}
 * at the top of this file is unused and references a JDK-internal class — it
 * should be removed.
 */
public class DwdInteractionCommentInfo {
    public static void main(String[] args) {
        // 1. Environment setup
        // Stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism: 4 (should match the partition count of the source topic).
        env.setParallelism(4);
        // Table environment layered on top of the stream environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Checkpointing
        // EXACTLY_ONCE trades some latency for accuracy; pick per requirements.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
            /*
            // Production-grade settings — uncomment and tune as needed:
            CheckpointConfig checkpointConfig = env.getCheckpointConfig();
            // Fail a checkpoint that takes longer than one minute.
            checkpointConfig.setCheckpointTimeout(60000L);
            // Keep externalized checkpoints when the job is cancelled.
            checkpointConfig.setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
            // Minimum pause between the end of one checkpoint and the start of the next.
            checkpointConfig.setMinPauseBetweenCheckpoints(2000L);
            // Restart strategy: 3 attempts, 3s apart.
            env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
            // State backend: heap-based HashMap state.
            env.setStateBackend(new HashMapStateBackend());
            // Checkpoint storage location on HDFS.
            checkpointConfig.setCheckpointStorage("hdfs://hadoop102:8020/ck/");
            // HDFS user for checkpoint writes — set before any HDFS access.
            System.setProperty("HADOOP_USER_NAME","atguigu");  // fixed typo: was "atugigu"
            */

        // 3. Create a dynamic table over the Kafka CDC topic, declaring both a
        //    processing-time attribute (pt, used by the lookup join below) and an
        //    event-time attribute (et) with a zero-delay watermark.
        //    ts is assumed to be epoch seconds — hence TO_TIMESTAMP_LTZ(ts, 0).
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "  `database` string,\n" +
                "  `table` string,\n" +
                "  `type` string,\n" +
                "  `data` MAP<string,string>,\n" +
                "  `old` MAP<string,string>,\n" +
                "   ts bigint,\n" +
                "   pt as PROCTIME(),\n" +
                "   et as TO_TIMESTAMP_LTZ(ts, 0),\n" +
                "   WATERMARK FOR et AS et\n" +
                ") " + SQLUtil.getKafkaDDL(Constant.TOPIC_DB,Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO));
            //tableEnv.executeSql("select * from topic_db").print();

        // 4. Filter: keep only INSERT events on the comment_info table and
        //    project the comment fields out of the generic `data` map.
        Table commentTab = tableEnv.sqlQuery("select\n" +
                "      `data`['id'] id,\n" +
                "      `data`['user_id'] user_id,\n" +
                "      `data`['sku_id'] sku_id,\n" +
                "      `data`['appraise'] appraise,\n" +
                "      `data`['comment_txt'] comment_txt,\n" +
                "pt,\n" +
                "ts\n" +
                "from topic_db where `table`='comment_info' and`type`='insert'");
        tableEnv.createTemporaryView("comment_table",commentTab);
        //tableEnv.executeSql("select * from comment_table").print();

        // 5. Dictionary dimension table backed by HBase (HBase connector).
        //    The connector caches lookups; misses fall through to HBase.
        tableEnv.executeSql("CREATE TABLE base_dic (\n" +
                " dic_code string,\n" +
                " info ROW<dic_name string>,\n" +
                " PRIMARY KEY (dic_code) NOT ENFORCED\n" +
                ")"+SQLUtil.getHBaseDDL("dim_base_dic"));
        //tableEnv.executeSql("select dic_code,dic_name from base_dic").print();

        // 6. Lookup join (FOR SYSTEM_TIME AS OF processing time): the comment
        //    stream drives the join; each row probes the dictionary table.
        Table joinedTable = tableEnv.sqlQuery("SELECT \n" +
                "   id,\n" +
                "   user_id,\n" +
                "   sku_id,\n" +
                "   appraise,\n" +
                "   dic.dic_name appraise_name,\n" +
                "   comment_txt,\n" +
                "   ts\n" +
                "FROM comment_table AS c\n" +
                "  JOIN base_dic FOR SYSTEM_TIME AS OF c.pt AS dic\n" +
                "    ON c.appraise = dic.dic_code");
        //joinedTable.execute().print();

        // 7. Sink: DWD Kafka topic via the upsert-kafka connector, keyed by id.
        tableEnv.executeSql("CREATE TABLE "+Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO+" (\n" +
                "  id string,\n" +
                "  user_id string,\n" +
                "  sku_id string,\n" +
                "  appraise string,\n" +
                "  appraise_name string,\n" +
                "  comment_txt string,\n" +
                "  ts bigint,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") "+SQLUtil.getUpsertKafkaDDL(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO));

        // Submit the job: executeInsert triggers execution, so no env.execute()
        // call is needed (and adding one would fail with "no operators defined").
        joinedTable.executeInsert(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);
    }

}
