package com.bw.gmall.realtime.dwd.db.app;

import com.bw.gmall.realtime.common.base.BaseSQLApp;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.SQLUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink job: extracts newly-inserted {@code comment_info} rows from the ODS
 * {@code topic_db} CDC stream, enriches the {@code appraise} code with its human-readable
 * name via a lookup join against the HBase {@code base_dic} dimension table, and sinks
 * the result to the DWD interaction-comment Kafka topic.
 *
 * <p>Why a lookup join: unlike a regular stream-stream join, a lookup join maintains no
 * join state for either side, so it cannot cause state-backend OOM. The left (driving)
 * table issues a point query against the right (dimension) table each time a row arrives.
 */
public class DwdInteractionCommentInfo extends BaseSQLApp {
    public static void main(String[] args) {
        // Checkpointing, restart strategy, state backend etc. are configured inside
        // BaseSQLApp.start; the topic name doubles as the checkpoint path segment /
        // Kafka consumer-group id there. Port 10012 hosts the local Flink REST UI.
        new DwdInteractionCommentInfo().start(10012, 4, Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);
    }

    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv) {
        // TODO 3. Read the CDC stream from the Kafka topic_db topic into a dynamic table
        //         (Kafka connector; mapping is defined in BaseSQLApp.readOdsDb).
        readOdsDb(tableEnv, Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);

        // TODO 4. Filter out freshly-inserted comment_info rows and project the payload
        //         fields out of the Maxwell-style `data` map. `pt` is the processing-time
        //         attribute required by the lookup join below.
        Table commentInfo = tableEnv.sqlQuery("" +
                "select \n" +
                "`data` ['id'] id,\n" +
                "`data` ['user_id'] user_id,\n" +
                "`data` ['sku_id'] sku_id,\n" +
                "`data` ['appraise'] appraise,\n" +
                "`data` ['comment_txt'] comment_txt,\n" +
                "`data` ['create_time'] create_time,\n" +
                " ts,\n" +
                " pt \n" +
                " from topic_db " +
                " where `database`='gmall' " +
                " and `table`='comment_info' " +
                " and `type`='insert' ");
        // Register the table object as a temporary view so it is addressable from SQL.
        tableEnv.createTemporaryView("comment_info", commentInfo);

        // TODO 5. Expose the HBase dictionary table base_dic as a dynamic table
        //         (HBase connector; defined in BaseSQLApp.readBaseDic).
        readBaseDic(tableEnv);

        // TODO 6. Lookup-join the comment stream with the dictionary table:
        //         FOR SYSTEM_TIME AS OF ci.pt queries base_dic per arriving row,
        //         keeping no join state.
        Table joinTable = tableEnv.sqlQuery("select " +
                "ci.id, " +
                "ci.user_id," +
                "ci.sku_id," +
                "ci.appraise," +
                "dic.info.dic_name appraise_name," +
                "ci.comment_txt," +
                "ci.ts " +
                "from comment_info ci " +
                "join base_dic for system_time as of ci.pt as dic " +
                "on ci.appraise=dic.dic_code");

        // TODO 7. Sink the joined rows to Kafka.
        // 7.1 Create a dynamic sink table mapped onto the target DWD topic.
        tableEnv.executeSql("create table " + Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO + " (" +
                "id string, " +
                "user_id string," +
                "sku_id string," +
                "appraise string," +
                "appraise_name string," +
                "comment_txt string," +
                "ts bigint " +
                ")" +
                SQLUtil.getKafkaDDLSink(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO));
        // 7.2 Emit the streaming insert (runs asynchronously; result intentionally ignored).
        joinTable.executeInsert(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);
    }
}
