package com.atguigu.gmallrealtime.app.dwd.db;

import com.atguigu.gmallrealtime.common.Constant;
import com.atguigu.gmallrealtime.util.HBaseUtil;
import com.atguigu.gmallrealtime.util.MyKafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD job: reads the ODS {@code topic_db} stream from Kafka, filters out inserted
 * {@code comment_info} rows, enriches the appraise code with its dictionary name via a
 * processing-time lookup join against the HBase {@code base_dic} table, and writes the
 * result to the DWD interaction-comment Kafka topic through an upsert-kafka sink.
 *
 * @author yhm
 * @create 2023-09-26 11:02
 */
public class DwdInteractionCommentInfo {

    /** Kafka consumer group id used when reading the ODS topic_db stream. */
    private static final String GROUP_ID = "dwd_interaction_comment_info";

    public static void main(String[] args) {
        // TODO 1 Create the Flink environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Checkpointing and state backend.
        // NOTE(review): the production configuration was present here but commented out
        // (local-dev mode). Before deploying, re-enable: exactly-once checkpoints every 3s,
        // 60s checkpoint timeout, RETAIN_ON_CANCELLATION, 2s min pause between checkpoints,
        // failureRateRestart(3, 30 days, 3s), HashMapStateBackend with checkpoint storage
        // hdfs://hadoop102:8020/gmall/ck, and System property HADOOP_USER_NAME=atguigu.

        // TODO 3 Read the ODS-layer raw data.
        // Read Kafka via Flink SQL. The plain kafka connector must NOT declare a
        // primary key; the upsert-kafka connector MUST declare one.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL(GROUP_ID));

        // TODO 4 Filter out the comment-table rows.
        registerCommentInfo(tableEnv);

        // TODO 5 Register the HBase base_dic dimension table.
        tableEnv.executeSql(HBaseUtil.getBaseDicDDL());

        // TODO 6 Enrich comments with the dictionary name via a lookup join.
        registerResultTable(tableEnv);

        // TODO 7 Write the joined result out to Kafka.
        writeResultToKafka(tableEnv);
    }

    /**
     * Filters topic_db down to inserted comment_info change records and registers
     * the projection as the temporary view {@code comment_info}.
     * {@code proc_time} is carried along because the lookup join in the next step
     * requires a processing-time attribute.
     */
    private static void registerCommentInfo(StreamTableEnvironment tableEnv) {
        Table commentTable = tableEnv.sqlQuery("SELECT \n" +
                "    data['id'] id,\n" +
                "    data['user_id'] user_id,\n" +
                "    data['nick_name'] nick_name,\n" +
                "    data['sku_id'] sku_id,\n" +
                "    data['spu_id'] spu_id,\n" +
                "    data['order_id'] order_id,\n" +
                "    data['appraise'] appraise,\n" +
                "    data['comment_txt'] comment_txt,\n" +
                "    ts,\n" +
                "    proc_time\n" +
                "from topic_db\n" +
                "Where `table`='comment_info'\n" +
                "and `type`='insert'");
        tableEnv.createTemporaryView("comment_info", commentTable);
    }

    /**
     * Lookup-joins comment_info against base_dic on the appraise code
     * (FOR SYSTEM_TIME AS OF proc_time, i.e. processing time) and registers the
     * enriched rows as the temporary view {@code result_table}. A LEFT join keeps
     * comments whose appraise code has no dictionary entry.
     */
    private static void registerResultTable(StreamTableEnvironment tableEnv) {
        Table resultTable = tableEnv.sqlQuery("SELECT \n" +
                "    c.id,\n" +
                "    c.user_id,\n" +
                "    c.nick_name,\n" +
                "    c.sku_id,\n" +
                "    c.spu_id,\n" +
                "    c.order_id,\n" +
                "    c.appraise,\n" +
                "    b.dic_name,\n" +
                "    c.comment_txt,\n" +
                "    c.ts,\n" +
                "    c.proc_time\n" +
                " FROM comment_info AS c\n" +
                " left JOIN base_dic FOR SYSTEM_TIME AS OF  c.proc_time AS b\n" +
                " ON c.appraise = b.rowkey");
        tableEnv.createTemporaryView("result_table", resultTable);
    }

    /**
     * Creates the upsert-kafka sink table (primary key on id, as required by the
     * upsert-kafka connector) and submits the INSERT job. executeSql on the INSERT
     * submits the streaming job, so no env.execute() call is needed.
     */
    private static void writeResultToKafka(StreamTableEnvironment tableEnv) {
        String sinkTopic = Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO;
        tableEnv.executeSql( "create table sink_kafka(\n" +
                "    id STRING,\n" +
                "    user_id STRING,\n" +
                "    nick_name STRING,\n" +
                "    sku_id STRING,\n" +
                "    spu_id STRING,\n" +
                "    order_id STRING,\n" +
                "    appraise STRING,\n" +
                "    dic_name STRING,\n" +
                "    comment_txt STRING,\n" +
                "    ts BIGINT,\n" +
                "    proc_time TIMESTAMP(3),\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ")\n"
                + MyKafkaUtil.getUpsertKafkaDLL(sinkTopic));

        tableEnv.executeSql("insert into sink_kafka select * from result_table");
    }
}
