package db.split.app;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD job: extracts newly inserted comment records from the CDC topic
 * {@code topic_db}, keeps the comment fields of interest, and enriches the
 * {@code appraise} code with its dictionary name from the MySQL table
 * {@code base_dic} via a processing-time lookup join.
 *
 * <p>NOTE(review): a regular stream/JDBC join would scan {@code base_dic}
 * exactly once at startup — dictionary updates would never be seen and the
 * join would accumulate unbounded state. A {@code FOR SYSTEM_TIME AS OF}
 * lookup join queries the dimension table per probe (with a cache) instead.
 */
public class DwdInteractionCommentInfo {
    public static void main(String[] args) {
        // 1. Create the stream environment; pin the Flink REST/Web UI port.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 10016);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        // Run HDFS accesses (checkpoint storage) as the "hadoop" user.
        System.setProperty("HADOOP_USER_NAME", "hadoop");

        // 2. Parallelism — can also be set per-operator, in flink-conf, or via CLI args.
        env.setParallelism(4);

        // 3. State backend / checkpointing (disabled during development).
        //    Production settings: HashMapStateBackend; checkpoint every 5 s,
        //    EXACTLY_ONCE mode, storage on HDFS (hdfs://hadoop102:8020/gmall2023/stream/<jobId>),
        //    max 1 concurrent checkpoint, >= 5 s between checkpoints, 10 s timeout,
        //    RETAIN_ON_CANCELLATION so checkpoints survive job cancellation.
//        env.setStateBackend(new HashMapStateBackend());
//        env.enableCheckpointing(5000);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall2023/stream/" + ckAndGroupId);
//        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(5000);
//        env.getCheckpointConfig().setCheckpointTimeout(10000);
//        env.getCheckpointConfig().setExternalizedCheckpointCleanup(RETAIN_ON_CANCELLATION);

        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        // 4. Source table over the CDC topic. `proc_time` is a processing-time
        //    attribute — required on the probe side of the lookup join below.
        tableEnvironment.executeSql("CREATE TABLE topic_db (\n" +
                "  `database` STRING,\n" +
                "  `table` STRING,\n" +
                "  `type` STRING,\n" +
                "  `data` MAP<STRING,STRING>,\n" +
                "  proc_time AS PROCTIME()\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'topic_db',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'testGroup',\n" +
                "  'scan.startup.mode' = 'earliest-offset',\n" +
                "  'format' = 'json'\n" +
                ")");

        // 5. Filter out freshly inserted comment rows; carry proc_time forward
        //    so the lookup join can reference it.
        Table comment = tableEnvironment.sqlQuery("select \n" +
                "  `data`['id'] id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['nick_name'] nick_name,\n" +
                "  `data`['appraise'] appraise,\n" +
                "  proc_time\n" +
                "from topic_db\n" +
                "where `database` = 'gmall'\n" +
                "and `table` = 'comment_info'\n" +
                "and `type` = 'insert'");
        // Register the filtered stream as a view for subsequent SQL.
        tableEnvironment.createTemporaryView("comment_info", comment);

//        tableEnvironment.sqlQuery("select * from  comment_info").execute().print();

        // 6. Dictionary (dimension) table in MySQL. The lookup cache keeps hot
        //    rows in memory so not every probe hits the database; entries
        //    expire after 1 hour, picking up dictionary changes eventually.
        //    TODO(review): credentials are hard-coded — move to config/secrets.
        tableEnvironment.executeSql("CREATE TABLE base_dic (\n" +
                "  dic_code STRING,\n" +
                "  dic_name STRING,\n" +
                "  PRIMARY KEY (dic_code) NOT ENFORCED\n" +
                ") WITH (\n" +
                "   'connector' = 'jdbc',\n" +
                "   'url' = 'jdbc:mysql://hadoop102:3306/gmall?useSSL=false',\n" +
                "   'table-name' = 'base_dic',\n" +
                "   'driver' = 'com.mysql.cj.jdbc.Driver',\n" +
                "   'lookup.cache.max-rows' = '10',\n" +
                "   'lookup.cache.ttl' = '1 hour',\n" +
                "   'username' = 'root',\n" +
                "   'password' = '123456'\n" +
                ")");

        // 7. Processing-time lookup join: for each comment, fetch the current
        //    dictionary name for its appraise code.
        tableEnvironment.sqlQuery("select \n" +
                "    id,\n" +
                "    user_id,\n" +
                "    nick_name,\n" +
                "    appraise,\n" +
                "    b.dic_name\n" +
                "from comment_info c\n" +
                "join base_dic FOR SYSTEM_TIME AS OF c.proc_time as b\n" +
                "on c.appraise = b.dic_code").execute().print();
    }
}
