package com.lzwk.app.dwm;

/**
 * @Author: CC
 * @Date: 2022/1/5 17:57
 */

import com.lzwk.app.function.ToInt;
import com.lzwk.tableSql.mdb.Jdbc;
import com.lzwk.tableSql.kafka.StatLog;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink streaming SQL job that computes per-day unique "live on" visits.
 *
 * <p>Pipeline: reads raw stat-log events from Kafka, filters live-on/online
 * actions, deduplicates to the first occurrence per (day, account, lecture)
 * via a 10s tumble pre-aggregation plus ROW_NUMBER, enriches with lecture
 * dimension data through a JDBC temporal lookup join, and writes the result
 * back to a Kafka sink table.
 */
public class UniqueVisitStatLogSql {

    /** Registers the JDBC lookup (dimension) table {@code lectureLectures}. */
    public static void createJdbcLookupTables(StreamTableEnvironment tableEnv) {
        Jdbc.lectureLectures(tableEnv);
    }

    /** Registers the Kafka source table {@code source_stat_log}. */
    public static void createSourceKafkaTables(StreamTableEnvironment tableEnv) {
        StatLog.statLogSource(tableEnv);
    }

    /** Registers the Kafka sink table {@code sink_stat_log}. */
    public static void createSinkKafkaTables(StreamTableEnvironment tableEnv) {
        StatLog.statLogLhSink(tableEnv);
    }

    /**
     * Builds the dedup + enrichment query and submits the INSERT job.
     *
     * <p>Note: {@code executeSql("insert into ...")} submits the streaming job
     * by itself; no additional {@code env.execute()} is required afterwards.
     *
     * @param tableEnv table environment with source/sink/lookup tables and the
     *                 {@code toInt} function already registered
     */
    public static void insertTable(StreamTableEnvironment tableEnv) {
        // Keep only "live on" / "online" classroom events; derive the day
        // string and ISO day-of-week from the zoned event time.
        Table filterTable = tableEnv.sqlQuery("" +
                "select " +
                "    account_id, " +
                "    lecture_id, " +
                "    time_local, " +
                "    time_local_tz, " +
                "    DATE_FORMAT(time_local_tz, 'yyyy-MM-dd') AS dt, " +
                "    CAST(DAYOFWEEK(time_local_tz) AS INT) AS week_day, " +
                "    proc_time " +
                "from  " +
                "    source_stat_log " +
                "where " +
                "    action in ('classroom_stat_liveon','classroom_stat_online')");

        // Pre-aggregate in 10-second tumbling windows (on event time
        // `time_local`) to thin out the stream before deduplication.
        Table tumbleTable = tableEnv.sqlQuery("" +
                "select " +
                "    dt, " +
                "    week_day, " +
                "    account_id, " +
                "    lecture_id, " +
                "    min(time_local_tz) AS time_local_tz, " +
                "    max(proc_time) AS proc_time " +
                "from " + filterTable + " " +
                "group by " +
                "    dt, " +
                "    week_day, " +
                "    account_id, " +
                "    lecture_id, " +
                "    TUMBLE(time_local, INTERVAL '10' SECOND)");

        // Keep only the FIRST arrival per (day, account, lecture), and remap
        // DAYOFWEEK (Sunday=1..Saturday=7) to Monday=1..Sunday=7.
        Table attendClassTable = tableEnv.sqlQuery("" +
                "select " +
                "  dt," +
                "  week_day," +
                "  account_id," +
                "  lecture_id," +
                "  time_local_tz," +
                "  proc_time " +
                "from " +
                "   (select " +
                "       dt," +
                "       case " +
                "           when week_day = 1 then 7 " +
                "           when week_day > 1 then week_day-1 " +
                "       else 0 end week_day," +
                "       account_id," +
                "       lecture_id," +
                "       time_local_tz," +
                "       proc_time," +
                "       row_number() over (partition by dt,week_day,account_id,lecture_id order by time_local_tz) AS rowNum " +
                "from  " + tumbleTable + ") " +
                "where rowNum=1"
        );

        // Temporal (processing-time) lookup join against the JDBC dimension
        // table to attach channel/liveroom ids.
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "   t1.dt," +
                "   t1.week_day," +
                "   t1.account_id," +
                "   t1.lecture_id," +
                "   t1.time_local_tz," +
                "   t2.channel_id," +
                "   t2.liveroom_id " +
                "from "+ attendClassTable + " t1 " +
                "left join lectureLectures FOR SYSTEM_TIME AS OF `t1`.`proc_time` as `t2` " +
                "   on toInt(`t1`.`lecture_id`) = `t2`.`id`"
        );

        // Submit the INSERT job; unmatched dimension values default to -1.
        tableEnv.executeSql("insert into sink_stat_log " +
                "select " +
                "   dt, " +
                "   week_day, " +
                "   toInt(account_id), " +
                "   toInt(lecture_id), " +
                "   IFNULL(channel_id,-1), " +
                "   IFNULL(liveroom_id,-1), " +
                "   time_local_tz AS visit_time " +
                "from " + resultTable);
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Table-level configuration.
        Configuration configuration = tableEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
        // Idle-source timeout keeps watermarks advancing on quiet partitions.
        configuration.setString("table.exec.source.idle-timeout", "5min");
        configuration.setString("pipeline.name", "stat_log_liveon_lh");
        // Dedup state only needs to survive one day; expire it after 24h.
        configuration.setString("table.exec.state.ttl", "24h");

        // Checkpointing & state backend (RocksDB, exactly-once, HDFS storage).
        env.setStateBackend(new EmbeddedRocksDBStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://10.2.16.4:4007/flink_113/checkpoints/ck");
        env.enableCheckpointing(600000L);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(100000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(60 * 1000);

        tableEnv.createTemporarySystemFunction("toInt", new ToInt());
        createJdbcLookupTables(tableEnv);
        createSourceKafkaTables(tableEnv);
        createSinkKafkaTables(tableEnv);
        // insertTable submits the job via executeSql; do NOT call
        // env.execute() afterwards — with no DataStream operators defined it
        // would throw "No operators defined in streaming topology".
        insertTable(tableEnv);
    }
}
