package com.atguigu.edu.realtime.app.dwd.traffic;

import com.atguigu.edu.realtime.common.KafkaTopicConfig;
import com.atguigu.edu.realtime.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DwdTrafficUserJumpDetail
 * <p>Traffic domain: user "jump" (single-page session / bounce) transaction fact table.
 * Reads the DWD page-view log from Kafka and writes sessions containing exactly one
 * page view to the jump-detail topic.
 *
 * @author zhaoxunfeng
 * @version 1.0.0
 * @since 2022-08-31
 */
public class DwdTrafficUserJumpDetail {
    public static void main(String[] args) {
        // TODO 1. Set up the streaming and table execution environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);
        env.setParallelism(3);

        // TODO 2. Declare a table over the DWD page-view log topic in Kafka.
        // NOTE(review): the original DDL also declared `dt as proctime()`, which was
        // never referenced by any query in this job; the dead computed column is removed.
        tableEnvironment.executeSql("" +
                "create table page_log\n" +
                "(\n" +
                "    `common` map<string, string>,\n" +
                "    page   map<string, string>,\n" +
                "    ts     string\n" +
                ") " + KafkaUtil.getKafkaDDL(KafkaTopicConfig.DWD_TRAFFIC_PAGE_LOG_TOPIC, "DwdTrafficUserJumpDetail"));

        // TODO 3. Keep only sessions that contain exactly one page view (a "jump"/bounce).
        // This is an unbounded group aggregation, so it produces an updating stream:
        // a session is emitted when its first page arrives (count = 1) and retracted
        // if a second page shows up later. Columns are aliased to match the sink schema
        // (executeInsert maps by position, but explicit names keep the plan readable).
        Table result = tableEnvironment.sqlQuery("" +
                "select common['sid'] sid,\n" +
                "       common['is_new'] isNew,\n" +
                "       common['sc'] sc,\n" +
                "       UNIX_TIMESTAMP() ts\n" +
                "from page_log\n" +
                "group by common['sid'], common['is_new'], common['sc']\n" +
                "having count(*) = 1");

        // TODO 4. Write the result to the jump-detail Kafka topic.
        // upsert-kafka (keyed by sid) is required here: it turns the retractions from
        // the HAVING aggregation above into tombstone/update records instead of failing
        // the way a plain append-only kafka sink would.
        tableEnvironment.executeSql("" +
                "create table dwd_traffic_user_jump_detail(\n" +
                "    sid string,\n" +
                "    isNew string,\n" +
                "    sc string,\n" +
                "    ts bigint,\n" +
                "    primary key(sid) not ENFORCED\n" +
                ") " + KafkaUtil.getUpsertKafkaDDL(KafkaTopicConfig.DWD_TRAFFIC_JUMP_DETAIL_TOPIC));

        // executeInsert submits the job; no explicit env.execute() is needed for
        // a pure Table API pipeline.
        result.executeInsert("dwd_traffic_user_jump_detail");
    }
}
