package com.lzwk.app.dws.statLog;

import com.lzwk.app.dwm.UniqueVisitStatLogSql;
import com.lzwk.app.function.ToInt;
import com.lzwk.config.JobConfig;
import com.lzwk.tableSql.kafka.StatLog;
import com.lzwk.tableSql.mdb.Jdbc;
import com.lzwk.utils.MyKafkaUtil;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.OutputTag;

public class neZhaSql {

    /** Registers the JDBC lookup (dimension) tables consumed by the temporal joins in {@link #insertTable}. */
    public static void createJdbcLookupTables(StreamTableEnvironment tableEnv) {
        Jdbc.lectureLectures(tableEnv);
    }

    /** Registers the Kafka source table {@code dwd_nezha_stat_log}. */
    public static void createSourceKafkaTables(StreamTableEnvironment tableEnv) {
        StatLog.statLogSourceNZ(tableEnv);
    }

//    public static void createSinkKafkaTables(StreamTableEnvironment tableEnv) {
//        StatLog.statLogLhSink(tableEnv);
//    }

    /**
     * Builds the DWS pipeline:
     * <ol>
     *   <li>de-duplicate stat-log events inside 20-second tumbling event-time windows;</li>
     *   <li>roll the windowed results up to daily counts per (action, account, link, booking);</li>
     *   <li>enrich with dimension attributes via processing-time lookup joins.</li>
     * </ol>
     * Currently the daily aggregate is only printed (debugging); the Kafka sink insert is disabled.
     */
    public static void insertTable(StreamTableEnvironment tableEnv) {
        // Step 1: window de-duplication — collapse duplicates of the same
        // (action, account_id, link_id, booking_id) key within a 20s tumbling window.
        Table distinctTable = tableEnv.sqlQuery("" +
                "select " +
                "    TUMBLE_START(rt, INTERVAL '20' SECOND) stt," +
                "    DATE_FORMAT(TUMBLE_END(rt, INTERVAL '20' SECOND), 'yyyy-MM-dd HH:mm:ss') edt, " +
                "    account_id, " +
                "    link_id, " +
                "    booking_id, " +
                "    action, " +
                "    min(rt) as min_rt," +
                "    max(rt) as max_rt," +
                "    max(proc_time) as proc_time " +
                "from " +
                "    dwd_nezha_stat_log " +
                "group by " +
                "    action,account_id, link_id, booking_id, TUMBLE(rt, INTERVAL '20' SECOND)"
        );

        // Step 2: daily aggregation — count de-duplicated window rows per day and key.
        Table dwsTable = tableEnv.sqlQuery("" +
                "select " +
                "    DATE_FORMAT(stt, 'yyyy-MM-dd') AS dt," +
                "    account_id, " +
                "    link_id, " +
                "    booking_id, " +
                "    action, " +
                "    count(1) as cc," +
                "    min(min_rt) as min_rt," +
                "    max(max_rt) as max_rt," +
                "    max(proc_time) as proc_time " +
                "from " + distinctTable + " " +
                "group by " +
                "    DATE_FORMAT(stt, 'yyyy-MM-dd'), action, account_id, link_id, booking_id "
        );

        // Step 3: dimension enrichment via processing-time temporal (lookup) joins.
        // BUG FIX: the original declared alias `t2` for all three lookup joins while the
        // ON clauses referenced undeclared aliases `t3`/`t4`, contained a misplaced
        // backtick in `t4.`id`, and was missing a space between the first join's ON
        // clause and the following "left join" (the strings concatenated to `id`left join).
        // Each join now carries its own alias (t2/t3/t4).
        // BUG FIX: min_rt is produced by the aggregate (t1), not by the course_link
        // dimension table, so `t2.min_rt` is now `t1.min_rt`.
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "   t1.dt," +
                "   t1.account_id," +
                "   t1.booking_id," +
                "   t2.stage_id," +
                "   t1.link_id," +
                "   t2.link_type," +
                "   t1.action," +
                "   t1.min_rt," +
                "   t1.max_rt," +
                "   t1.cc " +
                "from " + dwsTable + " t1 " +
                "left join wk_course.course_link FOR SYSTEM_TIME AS OF `t1`.`proc_time` as `t2` " +
                "   on toInt(`t1`.`link_id`) = `t2`.`id` " +
                "left join wk_course.course_stage FOR SYSTEM_TIME AS OF `t1`.`proc_time` as `t3` " +
                "   on toInt(`t2`.`stage_id`) = `t3`.`id` " +
                // NOTE(review): the original ON clause was `toInt(t2.course_id) = t4.id`,
                // joining booking's primary key to course_link.course_id; t1.booking_id
                // looks like the intended key — confirm against the booking table schema.
                "left join book.booking FOR SYSTEM_TIME AS OF `t1`.`proc_time` as `t4` " +
                "   on toInt(`t1`.`booking_id`) = `t4`.`id`"
        );

        // Debug output of the daily aggregate as a retract stream.
        // NOTE(review): resultTable is built but never consumed — only dwsTable is
        // printed; switch to resultTable once the enrichment joins are verified.
        DataStream<Tuple2<Boolean, Row>> tuple2DataStream = tableEnv.toRetractStream(dwsTable, Row.class);

        tuple2DataStream.print();

        //        // insert into the sink table (disabled)
//        tableEnv.executeSql("insert into sink_stat_log " +
//                "select " +
//                "   dt, " +
//                "   week_day, " +
//                "   toInt(account_id), " +
//                "   toInt(lecture_id), " +
//                "   IFNULL(channel_id,-1), " +
//                "   IFNULL(liveroom_id,-1), " +
//                "   time_local_tz AS visit_time " +
//                "from " + resultTable);
    }

    /**
     * Job entry point: configures the table environment (timezone, idle-source timeout,
     * 25h state TTL to cover the daily aggregation window), registers sources and lookup
     * tables, and launches the {@code stat_log_nz} pipeline.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        Configuration configuration = tableEnv.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "Asia/Shanghai");
//        configuration.setString("table.exec.state.ttl", "5s");
        // Advance watermarks past idle Kafka partitions so windows still fire.
        configuration.setString("table.exec.source.idle-timeout", "5min");
        configuration.setString("pipeline.name", "stat_log_nz");
        // 25h TTL: slightly longer than one day so the daily group-by state survives the full day.
        configuration.setString("table.exec.state.ttl", "25h");
        // 1.1 Checkpointing & state backend (disabled)
//        env.setStateBackend(new EmbeddedRocksDBStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://10.2.16.4:4007/flink_113/checkpoints/ck");
//        env.enableCheckpointing(600000L);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(100000L);
//        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(60 * 1000);

        neZhaSql.createJdbcLookupTables(tableEnv);
        neZhaSql.createSourceKafkaTables(tableEnv);
//        neZhaSql.createSinkKafkaTables(tableEnv);
        neZhaSql.insertTable(tableEnv);


        env.execute("stat_log_nz");
    }
}
