package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * User registration (DWD layer): joins new user_info rows from the CDC stream
 * with page-view logs to enrich registrations, then writes them to Kafka.
 */
public class DwdUserRegister {
    public static void main(String[] args) {
        // Set up the streaming environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // State TTL: idle join state older than 60s is evicted. This bounds the
        // state of the left join below; a page-log row arriving more than 60s
        // after its registration will no longer match.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(60));

        // Read the CDC topic_db stream from Kafka and register it as a dynamic table.
        // NOTE(review): the consumer group id had a typo ("dwd_user_regiser");
        // fixed to "dwd_user_register". Confirm no deployed job still relies on
        // the old group's committed offsets before rolling this out.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_user_register"));

        // Keep only freshly inserted user_info rows — i.e. user registrations.
        Table userInfo = tableEnv.sqlQuery("select " +
                "`data`['id'] id," +
                "`data`['create_time'] create_time," +
                "date_format(`data`['create_time'],'yyyy-MM-dd') create_date," +
                "ts " +
                "from topic_db " +
                "where `table` = 'user_info' " +
                "and `type` = 'insert' ");
        tableEnv.createTemporaryView("user_info", userInfo);

        // Read the DWD page-log topic from Kafka and register it as a dynamic table.
        tableEnv.executeSql("CREATE TABLE dwd_traffic_page_log (\n" +
                "  `common` map<string,string>,\n" +
                "  `page` string,\n" +
                "  `ts` string \n" +
                ") " + MyKafkaUtil.getKafkaDDL("dwd_traffic_page_log", "dwd_traffic_page_log_groupId"));

        // Keep only page views from logged-in users. This stream merely enriches
        // registrations with ar/ch/mid, so no further predicates are applied
        // (e.g. no is_new filter).
        Table pageTable = tableEnv.sqlQuery("select " +
                "`common`['uid'] id," +
                "`common`['ar'] ar," +
                "`common`['ch'] ch," +
                "`common`['mid'] mid\n" +
                "from dwd_traffic_page_log \n" +
                "where `common`['uid'] is not null ");
        tableEnv.createTemporaryView("page_table", pageTable);

        // Left join: every registration is emitted even without a matching page
        // view (its ar/ch/mid come out null). row_time is taken at processing
        // time for downstream watermarking/ordering.
        Table joinTable = tableEnv.sqlQuery("select " +
                "ui.id id, " +
                "ar, " +
                "ch, " +
                "mid, " +
                "create_time,\n" +
                "create_date, \n" +
                "ts, \n" +
                "current_row_timestamp() row_time \n" +
                "from user_info ui \n" +
                "left join page_table pt \n" +
                "on ui.id = pt.id ");
        tableEnv.createTemporaryView("join_table", joinTable);

        // Sink: upsert-kafka table. The primary key (id) determines the Kafka
        // record key and therefore the partition; it also lets the left join's
        // retract/update stream be written as upserts.
        tableEnv.executeSql("CREATE TABLE dwd_user_register (\n" +
                "  id string,\n" +
                "  ar string,\n" +
                "  ch string,\n" +
                "  mid string,\n" +
                "  create_time string,\n" +
                "  create_date string,\n" +
                "  ts string,\n" +
                "  row_time timestamp_LTZ(3),\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") " + MyKafkaUtil.getUpsertKafkaDDL("dwd_user_register"));
        System.out.println("写入kafka");
        // executeSql submits the INSERT job asynchronously; the streaming job
        // keeps running after main returns, so the message below only means
        // "job submitted", not "data written". No env.execute() is needed for
        // a pure Table API pipeline.
        tableEnv.executeSql("insert into dwd_user_register select * from join_table");
        System.out.println("写入完毕");

    }
}
