package com.zy.gmall.realtime.app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD comment-interaction fact job.
 *
 * <p>Reads MySQL change-log events for {@code comment_info} from the Kafka topic
 * {@code topic_db}, lookup-joins the HBase dictionary dimension table
 * {@code gmall_realtime:dim_base_dic} to resolve the appraise code to a name,
 * and writes the enriched rows to Kafka with upsert semantics.
 */
public class DwdInteractionCommentInfo {

    public static void main(String[] args) {
        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism should match the partition count of the source Kafka topic
        // so every subtask reads a partition. NOTE(review): confirm topic_db has 4 partitions.
        env.setParallelism(4);
        // Table environment layered on top of the stream environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // 2. Checkpointing deliberately omitted here (would be required in production
        //    for exactly-once delivery to the upsert-kafka sink).

        createKafkaSourceTable(tableEnv);   // 3. raw CDC events from Kafka
        registerCommentView(tableEnv);      // 4. filter comment_info inserts
        createDimDicTable(tableEnv);        // 5. HBase dictionary lookup table
        registerJoinedView(tableEnv);       // 6. lookup join on appraise code
        writeToKafkaSink(tableEnv);         // 7. sink to Kafka
    }

    /**
     * 3. Registers the Kafka source table over the CDC change-log topic.
     * NOTE(review): `ts` is declared STRING though CDC tools usually emit a numeric
     * epoch — the JSON format coerces it, but confirm downstream expectations.
     */
    private static void createKafkaSourceTable(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE KafkaSource (\n" +
                "  `database` STRING,\n" +
                "  `table` STRING,\n" +
                "  `type` STRING,\n" +
                "  `ts` STRING,\n" +
                "  `data` MAP<STRING,STRING>,\n" +
                "  `old` MAP<STRING,STRING>,\n" +
                "  proc_time AS PROCTIME()\n" +   // processing time, needed for the lookup join
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'topic_db',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092,hadoop103:9092,hadoop104:9092',\n" +
                "  'properties.group.id' = 'DwdInteractionCommentInfo',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json'\n" +
                ")");
        // Debug: tableEnv.executeSql("select * from KafkaSource").print();
    }

    /** 4. Filters the change-log down to freshly inserted comment_info rows. */
    private static void registerCommentView(StreamTableEnvironment tableEnv) {
        Table commentTable = tableEnv.sqlQuery("select " +
                "`data`['id'] id," +
                "`data`['user_id'] user_id," +
                "`data`['sku_id'] sku_id," +
                "`data`['appraise'] appraise," +
                "`data`['comment_txt'] comment_txt ," +
                " ts , " +
                " proc_time" +
                " from " +
                " KafkaSource " +
                "where `table` = 'comment_info' and `type` = 'insert' ");
        tableEnv.createTemporaryView("commentTable", commentTable);
        // Debug: tableEnv.executeSql("select * from commentTable").print();
    }

    /**
     * 5. Registers the HBase dictionary dimension table with an async,
     * partial (LRU) lookup cache to cut per-row HBase round trips.
     */
    private static void createDimDicTable(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE dimDicTable (\n" +
                " dic_code STRING,\n" +
                " info ROW<dic_name STRING> ,\n" +
                " PRIMARY KEY (dic_code) NOT ENFORCED\n" +
                ") WITH (\n" +
                " 'connector' = 'hbase-2.2',\n" +
                " 'table-name' = 'gmall_realtime:dim_base_dic',\n" +
                " 'zookeeper.quorum' = 'hadoop102:2181,hadoop103:2181,hadoop104:2181',\n" +
                " 'lookup.async' = 'true',\n" +
                " 'lookup.cache' = 'PARTIAL',\n" +
                " 'lookup.partial-cache.max-rows' = '500',\n" +
                " 'lookup.partial-cache.expire-after-write' = '1 hour',\n" +
                " 'lookup.partial-cache.expire-after-access' = '1 hour'\n" +
                ")");
        // Debug: tableEnv.executeSql("select * from dimDicTable").print();
    }

    /**
     * 6. Lookup-joins comments with the dictionary table
     * (FOR SYSTEM_TIME AS OF proc_time → point-in-time lookup per row).
     */
    private static void registerJoinedView(StreamTableEnvironment tableEnv) {
        Table joined = tableEnv.sqlQuery("select ct.id, ct.user_id, ct.sku_id, ct.appraise, ddt.dic_name appraise_name, ct.comment_txt " +
                " from commentTable as ct join dimDicTable FOR SYSTEM_TIME AS OF ct.proc_time as ddt on ct.appraise = ddt.dic_code");
        tableEnv.createTemporaryView("kfSink", joined);
        // Debug: tableEnv.executeSql("select * from kfSink").print();
    }

    /**
     * 7. Declares the upsert-kafka sink (keyed by id) and starts the insert.
     * NOTE(review): sink topic 'test' looks like a placeholder — confirm the real
     * DWD topic name (e.g. dwd_interaction_comment_info) before deploying.
     */
    private static void writeToKafkaSink(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE KafkaSink (\n" +
                "  `id` STRING,\n" +
                "  `user_id` STRING,\n" +
                "  `sku_id` STRING,\n" +
                "  `appraise` STRING,\n" +
                "  `appraise_name` STRING,\n" +
                "  `comment_txt` STRING,\n" +
                "   PRIMARY KEY (id) NOT ENFORCED" +
                ") WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = 'test',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092,hadoop103:9092,hadoop104:9092',\n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json'\n" +
                ")");
        // Column order of kfSink matches KafkaSink's declaration, so SELECT * is safe here.
        tableEnv.executeSql("insert into KafkaSink select * from kfSink");
    }

}
