package com.lzwk.app.dws.statLog;

import com.lzwk.app.dwm.UniqueVisitStatLogSql;
import com.lzwk.app.function.ToInt;
import com.lzwk.beans.StatLogWk;
import com.lzwk.config.JobConfig;
import com.lzwk.tableSql.kafka.StatLog;
import com.lzwk.tableSql.mdb.Jdbc;
import com.lzwk.utils.MyKafkaUtil;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * @Author: CC
 * @Date: 2022/3/9 11:28
 */
public class weiKeSql {

    /**
     * Registers the JDBC dimension (lookup) tables needed by the temporal join
     * in {@link #insertTable(StreamTableEnvironment)} — here the lecture table.
     */
    public static void createJdbcLookupTables(StreamTableEnvironment tableEnv) {
        Jdbc.lectureLectures(tableEnv);
    }

    /**
     * Registers the Kafka source table {@code source_stat_log} that feeds the
     * aggregation query.
     */
    public static void createSourceKafkaTables(StreamTableEnvironment tableEnv) {
        StatLog.statLogSource(tableEnv);
    }

//    public static void createSinkKafkaTables(StreamTableEnvironment tableEnv) {
//        StatLog.statLogLhSink(tableEnv);
//    }

    /**
     * Builds the day-level learning-stat aggregation and prints its retract stream.
     *
     * <p>Pipeline as expressed by the SQL below:
     * <ol>
     *   <li>{@code fin_source}: keep only rows from the last 2 days whose
     *       {@code action} is one of the four play/learn events, normalizing
     *       null {@code lecture_id}/{@code account_id}/{@code learn_time} to -1
     *       via the registered {@code to_int} UDF.</li>
     *   <li>Pre-aggregate per (account, lecture, action) over 1-minute TUMBLE
     *       windows on {@code time_local}.</li>
     *   <li>Roll the minute windows up to a calendar day
     *       ({@code TIMESTAMPADD(HOUR, 8, ...)} — presumably a UTC→Asia/Shanghai
     *       shift; confirm against the source timestamps), producing window
     *       count {@code cc}, max learn time, and first/last visit times.</li>
     *   <li>Temporal LEFT JOIN against the {@code lectureLectures} lookup table
     *       (as of {@code proc_time}) to attach channel/liveroom ids,
     *       defaulting to -1 when no lecture row matches.</li>
     * </ol>
     *
     * <p>The result is emitted with {@code print()} only; the Kafka sink is
     * currently disabled (see the commented-out {@code createSinkKafkaTables}).
     *
     * @param tableEnv table environment with source, lookup tables and the
     *                 {@code to_int} function already registered
     */
    public static void insertTable(StreamTableEnvironment tableEnv) {
        // Filter to play/learn events, then aggregate as described above.
        final String aggregationSql = ""
                + "WITH fin_source AS (\n"
                + "                SELECT\n"
                + "                    referer,\n"
                + "                    remote_address,\n"
                + "                    request,\n"
                + "                    time_local,\n"
                + "                    time_local_tz,\n"
                + "                    user_agent,\n"
                + "                    x_forwarded_for,\n"
                + "                    action,\n"
                + "                    if(lecture_id is null,-1,to_int(lecture_id)) as lecture_id,\n"
                + "                    if(account_id is null,-1,to_int(account_id)) as account_id,\n"
                + "                    message_id,\n"
                + "                    playback_rate,\n"
                + "                    proc_time,\n"
                + "                    if(learn_time is null,-1,to_int(learn_time)) as learn_time \n"
                + "                FROM\n"
                + "                    source_stat_log\n"
                + "                WHERE\n"
                + "                    timestampDiff(DAY, time_local, LOCALTIMESTAMP) < 2\n"
                + "                    AND action in ('classroom_stat_online','longest_learn_time','listen_audio','classroom_stat_liveon')\n"
                + "            ) \n"
                + "            SELECT \n"
                + "                T1.dt,\n"
                + "                T1.lecture_id,\n"
                + "                T1.account_id,\n"
                + "                T1.action,\n"
                + "                IFNULL(lecture.channel_id,-1) AS channel_id, \n"
                + "                IFNULL(lecture.liveroom_id,-1) AS liveroom_id, \n"
                + "                T1.cc,\n"
                + "                T1.learn_time,\n"
                + "                T1.first_visit_time, \n"
                + "                T1.last_visit_time \n"
                + "            FROM \n"
                + "                (SELECT\n"
                + "                    cast(DATE_FORMAT(TIMESTAMPADD(HOUR, 8,window_start),'yyyy-MM-dd') AS TIMESTAMP(3)) as dt,              \n"
                + "                    lecture_id, \n"
                + "                    account_id,\n"
                + "                    action,\n"
                + "                    count(1) as cc,\n"
                + "                    max(learn_time) as learn_time,\n"
                + "                    min(visit_time) as first_visit_time,\n"
                + "                    max(visit_time) as last_visit_time,\n"
                + "                    max(proc_time) as proc_time\n"
                + "                FROM\n"
                + "                    (\n"
                + "                        SELECT\n"
                + "                            TUMBLE_START(time_local, INTERVAL '1' MINUTE) as window_start,\n"
                + "                            TUMBLE_END(time_local, INTERVAL '1' MINUTE) as window_end,\n"
                + "                            account_id, \n"
                + "                            lecture_id, \n"
                + "                            action,\n"
                + "                            max(proc_time) as proc_time,\n"
                + "                            max(learn_time) as learn_time,\n"
                + "                            max(time_local_tz) as visit_time \n"
                + "                        FROM fin_source \n"
                + "                        GROUP BY \n"
                + "                            account_id, lecture_id, action, TUMBLE(time_local, INTERVAL '1' MINUTE)\n"
                + "                    ) AS T\n"
                + "                GROUP BY\n"
                + "                    DATE_FORMAT(TIMESTAMPADD(HOUR, 8,window_start),'yyyy-MM-dd'), account_id, lecture_id, action) T1\n"
                + "                LEFT JOIN `lectureLectures` FOR SYSTEM_TIME AS OF `T1`.`proc_time` as `lecture` \n"
                + "                        on `T1`.`lecture_id` = `lecture`.`id` ";

        Table dailyStats = tableEnv.sqlQuery(aggregationSql);

        // Retract stream: the day-level aggregation updates rows as new windows
        // arrive, so each change comes as (isAccumulate, row).
        DataStream<Tuple2<Boolean, Row>> retractStream =
                tableEnv.toRetractStream(dailyStats, Row.class);
        retractStream.print();
    }

    /**
     * Job entry point: configures the environments, registers the UDF and
     * tables, wires the aggregation, and submits the job as
     * {@code stat_log_liveon_lh}.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        Configuration tableConf = tableEnv.getConfig().getConfiguration();
        tableConf.setString("table.local-time-zone", "Asia/Shanghai");
//        tableConf.setString("table.exec.state.ttl", "5s");
        tableConf.setString("table.exec.source.idle-timeout", "5min");
        tableConf.setString("pipeline.name", "stat_log_liveon_lh");
        // 24h TTL so per-day aggregation state is dropped once a day is complete.
        tableConf.setString("table.exec.state.ttl", "24h");

        // 1.1 Checkpointing & RocksDB state backend — currently disabled.
//        env.setStateBackend(new EmbeddedRocksDBStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://10.2.16.4:4007/flink_113/checkpoints/ck");
//        env.enableCheckpointing(600000L);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(100000L);
//        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(60 * 1000);

        // Register the null-safe string-to-int UDF used inside the SQL,
        // then the tables the query reads from / joins against.
        tableEnv.createTemporarySystemFunction("to_int", new ToInt());
        createJdbcLookupTables(tableEnv);
        createSourceKafkaTables(tableEnv);
//        createSinkKafkaTables(tableEnv);
        insertTable(tableEnv);

        env.execute("stat_log_liveon_lh");

    }
}
