package com.atguigu.gmall.realtime.app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Created by 黄凯 on 2023/7/10 0010 12:51
 *
 * @author 黄凯
 * <p>
 * Interaction domain: comment fact table (DWD layer).
 * <p>
 * Reads Maxwell change-log records from the {@code topic_db} Kafka topic,
 * keeps only {@code insert} events of the {@code comment_info} table,
 * enriches the appraise code via a processing-time temporal (lookup) join
 * against the HBase dictionary table, and writes the result to the
 * {@code dwd_interaction_comment} topic through the upsert-kafka connector.
 * <p>
 * Processes that must be running: zk, kafka, maxwell, hdfs, hbase,
 * DwdInteractionCommentInfo.
 */
public class DwdInteractionCommentInfo {

    public static void main(String[] args) {

        //TODO 1. Basic environment setup
        //1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Parallelism
        env.setParallelism(4);
        //1.3 Table environment on top of the stream environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2. Checkpoint settings (intentionally omitted here)
        // NOTE(review): without checkpointing, source offsets are not committed
        // as part of a consistent snapshot; enable checkpointing before running
        // this job in production.

        //TODO 3. Read topic_db into a dynamic source table
        String sourceTopic = "topic_db";
        String groupId = "dwd_interaction_comment_group";
        // Sink topic name defined once so the sink DDL cannot drift from it.
        String sinkTopic = "dwd_interaction_comment";

        // proc_time is required later as the temporal-join time attribute.
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "                            `database` STRING,\n" +
                "                            `table` STRING,\n" +
                "                            `type` STRING,\n" +
                "                            `ts` STRING,\n" +
                "                            `data` MAP<STRING,STRING>,\n" +
                "                            `old` MAP<STRING,STRING>,\n" +
                "                            proc_time as PROCTIME()\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = '" + sourceTopic + "',\n" +
                "    'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "    'properties.group.id' = '" + groupId + "',\n" +
                "    'scan.startup.mode' = 'latest-offset',\n" +
                "    'format' = 'json'\n" +
                ")");

        //TODO 4. Filter out the comment-table rows
        /**
         * Example of the Maxwell envelope carried by topic_db (shown here for a
         * payment_info row; comment_info rows share the same envelope shape):
         * {"database":"gmall","table":"payment_info","type":"update","ts":1688965470,"xid":3311,
         * "commit":true,"data":{"id":35839,"out_trade_no":"731429271267741","order_id":47212,"user_id":797,
         * "payment_type":"1102","trade_no":null,"total_amount":488.00,
         * "subject":"香奈儿（Chanel）女士香水5号香水 粉邂逅柔情淡香水EDT 粉邂逅淡香水35ml等1件商品","payment_status":"1602",
         * "create_time":"2022-06-08 13:04:29","callback_time":"2022-06-08 13:04:30","callback_content":"callback xxxxxxx",
         * "operate_time":"2022-06-08 13:04:30"},
         * "old":{"payment_status":"1601","callback_time":null,"callback_content":null,"operate_time":null}}
         */
        Table commentInfo = tableEnv.sqlQuery("select\n" +
                "    data['id'] id,\n" +
                "    data['user_id'] user_id,\n" +
                "    data['sku_id'] sku_id,\n" +
                "    data['appraise'] appraise,\n" +
                "    data['comment_txt'] comment_txt,\n" +
                "    ts,\n" +
                "    proc_time\n" +
                "from topic_db\n" +
                "where `table` = 'comment_info'\n" +
                "and `type` = 'insert'");

        tableEnv.createTemporaryView("comment_info", commentInfo);

        //TODO 5. Read the dictionary table from HBase as a lookup table
        /**
         * Lookup-cache options:
         * 'lookup.cache' = 'PARTIAL'                       -> cache a subset of rows on demand
         * 'lookup.partial-cache.max-rows' = '300'          -> at most 300 cached rows; oldest evicted first
         * 'lookup.partial-cache.expire-after-access' = '1 hour' -> each read renews the entry's 1-hour TTL
         */
        tableEnv.executeSql("CREATE TABLE base_dic (\n" +
                "                        rowkey STRING,\n" +
                "                        info ROW<dic_name STRING>,\n" +
                "                        PRIMARY KEY (rowkey) NOT ENFORCED\n" +
                ") WITH (\n" +
                "    'connector' = 'hbase-2.2',\n" +
                "    'table-name' = 'gmall0201_realtime:dim_base_dic',\n" +
                "    'zookeeper.quorum' = 'hadoop102:2181,hadoop103:2181,hadoop104:2181',\n" +
                "    'lookup.cache' = 'PARTIAL',\n" +
                "    'lookup.partial-cache.max-rows' = '300',\n" +
                "    'lookup.partial-cache.expire-after-write' = '1 hour',\n" +
                "    'lookup.partial-cache.expire-after-access' = '1 hour'\n" +
                ")");

        //TODO 6. Join comments with the dictionary (processing-time temporal join)
        Table resTable = tableEnv.sqlQuery("SELECT\n" +
                "    id,\n" +
                "    user_id,\n" +
                "    sku_id,\n" +
                "    appraise,\n" +
                "    dic.dic_name appraise_name,\n" +
                "    comment_txt,\n" +
                "    ts\n" +
                "FROM comment_info AS ci\n" +
                "         JOIN base_dic FOR SYSTEM_TIME AS OF ci.proc_time AS dic\n" +
                "ON ci.appraise = dic.rowkey");

        tableEnv.createTemporaryView("res_table", resTable);

        //TODO 7. Write the joined result to the sink Kafka topic
        //7.1 Dynamic sink table mapped onto the target topic (upsert semantics keyed by id)
        tableEnv.executeSql("CREATE TABLE dwd_interaction_comment (\n" +
                "\n" +
                "                                      id STRING,\n" +
                "                                      user_id STRING,\n" +
                "                                      sku_id STRING,\n" +
                "                                      appraise STRING,\n" +
                "                                      appraise_name STRING,\n" +
                "                                      comment_txt STRING,\n" +
                "                                      ts STRING,\n" +
                "                                      PRIMARY KEY (id) NOT ENFORCED\n" +
                ") WITH (\n" +
                "    'connector' = 'upsert-kafka',\n" +
                "    'topic' = '" + sinkTopic + "',\n" +
                "    'properties.bootstrap.servers' = 'hadoop102:9092,hadoop103:9092,hadoop104:9092',\n" +
                "    'key.format' = 'json',\n" +
                "    'value.format' = 'json'\n" +
                ")");

        //7.2 Insert. Columns are listed explicitly (instead of select *) so the
        //    statement keeps working even if the view's column order drifts.
        tableEnv.executeSql("insert into dwd_interaction_comment\n" +
                "select id, user_id, sku_id, appraise, appraise_name, comment_txt, ts\n" +
                "from res_table");
    }

}
