package cn.doitedu.etl;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink streaming job: video-play analysis, OLAP-topic aggregate wide table.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Maps the Kafka DWD topic {@code dwd-events} to a table with an event-time
 *       attribute ({@code rt}) derived from {@code action_time} and a watermark on it.</li>
 *   <li>Filters to video-play lifecycle events, marks each {@code video_resume} with a
 *       flag, and computes a running sum of that flag per (user, session, play) so each
 *       play segment between pauses gets a distinct segment id ({@code flag2}).</li>
 *   <li>Aggregates per 1-minute tumbling window and play segment, emitting the segment's
 *       min/max action_time (intended for a Doris aggregate table using MIN/MAX models).</li>
 * </ol>
 */
public class Job04_视频播放分析olap主题聚合宽表 {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Exactly-once checkpointing every 5s; local filesystem checkpoint storage (dev setup).
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        // Single parallelism for deterministic local testing.
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Source table mapped onto the Kafka DWD-layer topic: dwd-events.
        // play_id / video_id are computed columns extracted from the properties map;
        // rt is the event-time attribute with a zero-delay watermark.
        tenv.executeSql(
                "create table dwd_events_kafka(\n" +
                "     session_id         string\n" +
                "    ,event_id           string\n" +
                "    ,action_time        bigint\n" +
                "    ,properties         map<string,string>\n" +
                "    ,play_id as properties['play_id'] \n"  +
                "    ,video_id as properties['video_id'] \n"  +
                "    ,user_id bigint           \n" +
                "    ,province string          \n" +
                "    ,city string              \n" +
                "    ,region string            \n" +
                "    ,rt as to_timestamp_ltz(action_time,3) \n" +
                "    ,watermark for rt as rt  \n" +
                ") WITH (\n" +
                "    'connector' = 'kafka',\n" +
                "    'topic' = 'dwd-events',\n" +
                "    'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                "    'properties.group.id' = 'doit44_g1',\n" +
                "    'scan.startup.mode' = 'latest-offset',\n" +
                "    'value.format' = 'json',\n" +
                "    'value.fields-include' = 'EXCEPT_KEY'\n" +
                ")                                                    ");

        // tmp : filter to play lifecycle events; flag=1 marks a resume event.
        // tmp2: running sum of flag per (user, session, play) ordered by event time,
        //       so flag2 identifies the play segment between pause/resume boundaries.
        // outer: 1-minute tumbling window aggregation per segment, emitting the
        //        segment's start/end action_time (Doris side aggregates with MIN/MAX).
        tenv.executeSql("with tmp as (\n" +
                "select\n" +
                "  user_id,\n" +
                "  session_id,\n" +
                "  event_id,\n" +
                "  play_id,\n" +
                "  video_id,\n" +
                "  action_time,\n" +
                "  province,\n" +
                "  city,\n" +
                "  region,\n" +
                "  rt,\n" +
                "  if(event_id = 'video_resume',1,0) as flag\n" +
                "from dwd_events_kafka\n" +
                "where event_id in ('video_play','video_hb','video_pause','video_resume','video_stop')\n" +
                ")\n" +
                ", tmp2 as (\n" +
                "select\n" +
                "  user_id,\n" +
                "  session_id,\n" +
                "  event_id,\n" +
                "  play_id,\n" +
                "  video_id,\n" +
                "  action_time,\n" +
                "  province,\n" +
                "  city,\n" +
                "  region,\n" +
                "  rt,\n" +
                "  sum(flag) over(partition by user_id,session_id,play_id order by rt) as flag2\n" +
                "from tmp\n" +
                ")\n" +
                "select\n" +
                "    user_id,\n" +
                "    session_id,\n" +
                "    play_id,\n" +
                "    video_id,\n" +
                "    province,\n" +
                "    city,\n" +
                "    region,  \n" +
                "    flag2 ,\n" +
                // BUGFIX: the two aggregate columns were missing the separating comma,
                // which made the generated SQL unparseable at runtime.
                "    min(action_time) as play_start_time, \n" + // Doris table uses MIN aggregate model
                "    max(action_time) as play_end_time \n" +    // Doris table uses MAX aggregate model
                "from table(\n" +
                "    tumble(table tmp2,descriptor(rt),interval '1' minute)\n" +
                ")\n" +
                "group by \n" +
                "    window_start,\n" +
                "    window_end,\n" +
                "    user_id,\n" +
                "    session_id,\n" +
                "    play_id,\n" +
                "    video_id,\n" +
                "    province,\n" +
                "    city,\n" +
                "    region,  \n" +
                "    flag2 ").print();

        // TODO: lookup-join the aggregate result against the video-dimension table in HBase.
    }

}
