package com.atguigu.bigdata.edu.realtime.app.dws;

import com.atguigu.bigdata.edu.realtime.app.BaseSQLApp;
import com.atguigu.bigdata.edu.realtime.bean.VideoPlayBean;
import com.atguigu.bigdata.edu.realtime.common.Constant;
import com.atguigu.bigdata.edu.realtime.util.FlinkSinkUtil;
import com.atguigu.bigdata.edu.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * DWS job: reads the DWD video-play log stream from Kafka, enriches it with the
 * MySQL dimension tables {@code video_info} and {@code chapter_info} via
 * processing-time lookup joins, pre-aggregates play statistics per chapter over
 * a tumbling event-time window, and writes the result to the ClickHouse table
 * {@code dws_chapter_play}.
 *
 * <p>TODO(review): per the original author's note, this requirement is not fully
 * implemented — the job only opens a short window for pre-aggregation; the final
 * aggregation is expected to be done with SQL inside ClickHouse.
 */
public class Dws_17_DwsChapterPlay extends BaseSQLApp {
    public static void main(String[] args) {
        // port 3214, parallelism 2, app id / consumer group "Dws_17_DwsChapterPlay"
        new Dws_17_DwsChapterPlay().init(3214, 2, "Dws_17_DwsChapterPlay");
    }

    /**
     * Builds the shared {@code ") WITH (...)"} connector clause for the MySQL
     * lookup tables. The two dimension DDLs below differ only in the physical
     * table name, so the connector options are kept in one place.
     *
     * <p>NOTE(review): credentials are hard-coded here (as in the original) —
     * they should be moved to external configuration or a secret store.
     *
     * @param tableName physical MySQL table name for the {@code 'table-name'} option
     * @return the closing part of the DDL, from {@code ") WITH ("} to {@code ")"}
     */
    private static String mysqlLookupOptions(String tableName) {
        return ") WITH ( " +
                "  'connector' = 'jdbc', " +
                "  'driver' = 'com.mysql.cj.jdbc.Driver', " +
                "  'url' = 'jdbc:mysql://hadoop162:3306/edu?useSSL=false', " +
                "  'table-name' = '" + tableName + "', " +
                "  'username' = 'root', " +
                "  'password' = 'aaaaaa',  " +
                "  'lookup.cache.max-rows' = '10'," +
                "  'lookup.cache.ttl' = '1 hour' " +
                ")";
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // Idle state retention for join state. The original code set 10s here and
        // then overwrote it with 120s further down; only the value in effect when
        // the job is translated matters, so it is set exactly once to 120s
        // (the original comment said 120s is needed because the SQL contains joins).
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(120));

        // Source: video-play log from the DWD layer (Kafka).
        tEnv.executeSql("create table dwd_traffic_video_play ( " +
                "common map<string,string> ," +
                "appVideo map<string,string> , " +
                "ts bigint  ," +
                // a lookup join requires a processing-time attribute (pt)
                "pt as proctime()," +
                "et as to_timestamp_ltz(ts, 3), " +
                // tolerate 2 seconds of out-of-orderness
                "watermark for et as et - interval '2' seconds " +
                ") " + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_VIDEO_PLAY,"Dws_17_DwsChapterPlay"));

        // Dimension: chapter_info from MySQL (lookup table).
        tEnv.executeSql("create table chapter_info ( " +
                "  id bigint, " +
                "  chapter_name string " +
                mysqlLookupOptions("chapter_info"));

        // Dimension: video_info from MySQL (lookup table).
        tEnv.executeSql("create table video_info ( " +
                "  id bigint, " +
                "  chapter_id bigint " +
                mysqlLookupOptions("video_info"));

        // Join the three tables: fact stream + two processing-time lookup joins.
        Table tableResult = tEnv.sqlQuery("select \n" +
                "    `common`['uid'] uid,\n" +
                "    `common`['sid'] sid,\n" +
                "    `appVideo`['play_sec'] play_sec,\n" +
                "    `appVideo`['video_id'] video_id,\n" +
                "    chapter_id,\n" +
                "    chapter_name,  \n" +
                "    et, \n" +
                "    `ts`\n" +
                "    from dwd_traffic_video_play d \n" +
                "join video_info for system_time as of d.pt as v\n" +
                "    on v.id = cast(d.`appVideo`['video_id'] as bigint) \n" +
                "join  chapter_info for system_time as of d.pt as c" +
                " on v.chapter_id = c.id\n");
        tEnv.createTemporaryView("tableResult",tableResult);

        // Windowed pre-aggregation via the tumble windowing TVF. Column aliases
        // must match both the bean's property names and the ClickHouse columns.
        // TODO(review): original note — with a windowing TVF, complex requirements
        // are simpler in SQL than in the DataStream API.
        Table table = tEnv.sqlQuery("select date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt,\n" +
                "       date_format(window_end, 'yyyy-MM-dd HH:mm:ss')   edt,\n" +
                "       sid                              ,\n" +
                "       cast(uid as bigint)                              play_people,\n" +
                "       sum(cast(play_sec as bigint))                    play_time,\n" +
                "       chapter_id,\n" +
                "       chapter_name,\n" +
                "       unix_timestamp() * 1000                          ts\n" +
                "from table(tumble(table tableResult, descriptor(et), interval '5' second))\n" +
                "group by chapter_name,sid,uid, chapter_id, window_start, window_end");

        // NOTE: do not use camelCase aliases above — toRetractStream requires the
        // SQL field names to match the bean's properties exactly.
        // Keep only insert/accumulate messages (f0 == true), drop retractions.
        SingleOutputStreamOperator<VideoPlayBean> result = tEnv.toRetractStream(table, VideoPlayBean.class)
                .filter(t -> t.f0)
                .map(t -> t.f1);
        result.addSink(FlinkSinkUtil.getClickHouseSink("dws_chapter_play",VideoPlayBean.class));
        result.print();

        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}





/*
Study notes — learning: watching a video produces the play records of one session.

Watching:
  video_info
    inserts one row
      insert
  order_detail
    inserts multiple rows; granularity is sku
    only sku-related info is kept here; province, user info, etc. must be joined in
      insert

  order_detail_activity
    the promotion activity attached to the detail
      insert
  order_detail_coupon
    the coupon info used by the detail
      insert
  base_dic
    dictionary table, used to degenerate dimensions


order_detail
  join  on order id
order_info
  left join on detail id
order_detail_activity
  left join on detail id
order_detail_coupon
  lookup join
base_dic
 */

