package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Classname DwdPlayVideoPlayTimes
 * @Date 2022/11/19 9:58
 * @Created by arun
 */
/*
{"common":{"sc":"1","ar":"23","uid":"57","os":"iOS 13.2.9","ch":"Appstore","is_new":"1","md":"iPhone X","mid":"mid_61",
"vc":"v2.1.134","ba":"iPhone","sid":"09a0fbac-586d-4d02-bcd8-5f449560e25f"},
"appVideo":{"play_sec":30,"position_sec":90,"video_id":"1439"},
"ts":1668770107258}
 */
/**
 * DWD job: enrich app video-play log events with their chapter id and publish
 * the per-play detail records to the {@code dwd_play_videoPlayTimes} Kafka topic.
 *
 * <p>Pipeline: Kafka source ({@code dwd_traffic_appVideo_log}) → projection of the
 * play fields → temporal (lookup) join against the MySQL {@code chapter_info}
 * dimension table on {@code video_id} → upsert-kafka sink.
 */
public class DwdPlayVideoPlayTimes {

    /** Kafka brokers for the source topic (the sink DDL comes from MyKafkaUtil). */
    private static final String KAFKA_BOOTSTRAP_SERVERS =
            "8.130.45.219:9092,39.101.77.181:9092,39.101.65.42:9092";

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: map the dwd_traffic_appVideo_log topic to a dynamic table.
        // proc_time drives the lookup join below (FOR SYSTEM_TIME AS OF).
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_traffic_appVideo_log(\n" +
                "    `ts` string,\n" +
                "    `common` MAP<string,string>,\n" +
                "    `appVideo` MAP<string,string>,\n" +
                "    `proc_time` AS PROCTIME()\n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd_traffic_appVideo_log',\n" +
                "    'properties.bootstrap.servers' = '" + KAFKA_BOOTSTRAP_SERVERS + "',\n" +
                "    'properties.group.id' = 'dwd_play_videoPlay_group',\n" +
                "    'scan.startup.mode' = 'earliest-offset',\n" +
                "    'format' = 'json'\n" +
                ")");

        // Project the video-play detail fields out of the common/appVideo maps.
        // video_id is cast to BIGINT so it can join against chapter_info.video_id.
        Table videoPlay = tableEnv.sqlQuery("SELECT\n" +
                "    cast (appVideo['video_id'] as bigint) video_id,\n" +
                "    appVideo['play_sec'] play_sec,\n" +
                "    appVideo['position_sec'] position_sec,\n" +
                "    common['uid'] uid,\n" +
                "    common['mid'] mid,\n" +
                "    common['sid'] sid,\n" +
                "    ts ts,\n" +
                "    proc_time\n" +
                "FROM `dwd_traffic_appVideo_log`");
        tableEnv.createTemporaryView("videoPlay", videoPlay);

        // Dimension: chapter_info from MySQL via the JDBC connector, with a lookup
        // cache so repeated video_ids do not hit the database on every event.
        // NOTE(review): credentials are hardcoded here — move them to configuration.
        tableEnv.executeSql("CREATE TABLE chapter_info(\n" +
                "    id bigint,\n" +
                "    video_id bigint\n" +
                ") WITH(\n" +
                "    'connector' = 'jdbc', \n" +
                "    'url' = 'jdbc:mysql://hadoop101:3306/edu',\n" +
                "    'driver' = 'com.mysql.cj.jdbc.Driver',\n" +
                "    'username' = 'root',\n" +
                "    'password' = '000000',\n" +
                "    'table-name' = 'chapter_info',\n" +
                "    'lookup.cache.max-rows' = '500',\n" +
                "    'lookup.cache.ttl' = '1 hour'\n" +
                ")");

        // Lookup join: enrich each play event with the chapter id current as of
        // the event's processing time.
        Table resultTable = tableEnv.sqlQuery("SELECT \n" +
                "    `videoPlay`.video_id,\n" +
                "    `videoPlay`.uid,\n" +
                "    `videoPlay`.mid,\n" +
                "    `videoPlay`.sid,\n" +
                "    `videoPlay`.play_sec,\n" +
                "    `videoPlay`.position_sec,\n" +
                "    `videoPlay`.ts,\n" +
                "    `chapter_info`.id chapterId\n" +
                "FROM \n" +
                "`videoPlay` \n" +
                "JOIN `chapter_info` FOR SYSTEM_TIME AS OF videoPlay.proc_time\n" +
                "ON `videoPlay`.video_id = `chapter_info`.video_id");
        tableEnv.createTemporaryView("result_table", resultTable);

        // Sink: upsert-kafka table mapped to the dwd_play_videoPlayTimes topic.
        // NOTE(review): PRIMARY KEY(video_id) makes every new play event for a
        // video upsert (overwrite) the previous one in the topic — for a per-play
        // detail stream a per-event key (e.g. sid) looks more appropriate; confirm
        // the intended downstream semantics before changing it.
        tableEnv.executeSql("" +
                "CREATE TABLE dwd_play_videoPlayTimes(\n" +
                "    video_id bigint,\n" +
                "    uid string,\n" +
                "    mid string ,\n" +
                "    sid string ,\n" +
                "    play_sec string,\n" +
                "    position_sec string,\n" +
                "    ts string,\n" +
                "    chapterId bigint,\n" +
                "    primary key(video_id) not enforced\n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_play_videoPlayTimes"));

        // Explicit column lists instead of SELECT * so the statement cannot
        // silently mis-map columns if either schema is reordered later.
        tableEnv.executeSql("insert into dwd_play_videoPlayTimes \n" +
                "(video_id, uid, mid, sid, play_sec, position_sec, ts, chapterId)\n" +
                "select video_id, uid, mid, sid, play_sec, position_sec, ts, chapterId\n" +
                "from result_table");
    }
}
