package com.lzwk.tableSql.kafka;

/**
 * @Author: CC
 * @Date: 2022/1/5 17:57
 */

import com.lzwk.utils.MyKafkaUtil;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Registers Kafka-backed Flink SQL tables for "stat log" events
 * (live-class attendance / live-room visit tracking).
 *
 * <p>Each method builds a {@code CREATE TABLE} DDL string, executes it on the
 * supplied {@link StreamTableEnvironment}, and delegates the Kafka connector
 * options ({@code connector}, {@code topic}, broker list, group id, format, …)
 * to {@link MyKafkaUtil#getKafkaDDL} — see that class for the exact options.
 */
public final class StatLog {

    /** Utility class with only static members — not meant to be instantiated. */
    private StatLog() {
    }

    /**
     * Registers the Kafka source table {@code source_stat_log}.
     *
     * <p>Notable computed columns:
     * <ul>
     *   <li>{@code time_local_tz} — shifts {@code time_local} by +8 hours
     *       (UTC → UTC+8, presumably Asia/Shanghai — TODO confirm with the
     *       producer) and round-trips it through a formatted string to
     *       normalize it back into a {@code TIMESTAMP}.</li>
     *   <li>{@code proc_time} — processing-time attribute.</li>
     * </ul>
     * The watermark is declared on {@code time_local} with a 15-second
     * out-of-orderness bound.
     *
     * @param tableEnv table environment the DDL is executed on
     * @return the DDL string that was executed (handy for logging/debugging;
     *         the sibling methods discard theirs — kept for compatibility)
     */
    public static String statLogSource(StreamTableEnvironment tableEnv) {
        String topic = "stat_log_mid_113";
        String groupId = "stat_log_attend_class_lh";
        String sql = "create table source_stat_log(" +
                "    `referer` VARCHAR, " +
                "    `remote_address` VARCHAR, " +
                "    `request` VARCHAR, " +
                "    `time_local` TIMESTAMP(3), " +
                "    `time_local_tz` AS TO_TIMESTAMP(FROM_UNIXTIME(UNIX_TIMESTAMP(DATE_FORMAT(TIMESTAMPADD(HOUR, 8, time_local), 'yyyy-MM-dd HH:mm:ss')),'yyyy-MM-dd HH:mm:ss')), " +
                "    `user_agent` VARCHAR, " +
                "    `x_forwarded_for` VARCHAR, " +
                "    `action` VARCHAR, " +
                "    `lecture_id` VARCHAR, " +
                "    `account_id` VARCHAR, " +
                "    `message_id` VARCHAR, " +
                "    `playback_rate` VARCHAR, " +
                "    `learn_time` VARCHAR, " +
                "    `proc_time` AS proctime(), " +
                "    WATERMARK FOR time_local AS time_local - INTERVAL '15' SECOND " +
                ") with (" + MyKafkaUtil.getKafkaDDL(topic, groupId) + ")";
        tableEnv.executeSql(sql);
        return sql;
    }

    /**
     * Registers the Kafka source table {@code dwd_nezha_stat_log}.
     *
     * <p>{@code rt} converts the epoch-millisecond field {@code ts} to a
     * timestamp ({@code ts/1000} — integer division drops sub-second
     * precision) and carries the event-time watermark with a 15-second
     * out-of-orderness bound. {@code proc_time} is the processing-time
     * attribute.
     *
     * @param tableEnv table environment the DDL is executed on
     */
    public static void statLogSourceNZ(StreamTableEnvironment tableEnv) {
        String topic = "dwd_nezha_stat_log";
        String groupId = "data_group_stat";
        String sql = "create table dwd_nezha_stat_log(" +
                "    `action` STRING, " +
                "    `booking_id` STRING, " +
                "    `ts` BIGINT, " +
                "    `time_local` STRING, " +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    `link_id` STRING, " +
                "    `account_id` STRING, " +
                "    `proc_time` AS proctime(), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '15' SECOND " +
                ") with (" + MyKafkaUtil.getKafkaDDL(topic, groupId) + ")";
        tableEnv.executeSql(sql);
    }

    /**
     * Registers the Kafka sink (upsert) table {@code sink_stat_log}.
     *
     * <p>Declares a composite primary key
     * ({@code dt, week_day, account_id, lecture_id}) {@code NOT ENFORCED};
     * no consumer group id is passed since this table is written to, not
     * read from. The connector options come from the single-argument
     * {@link MyKafkaUtil#getKafkaDDL(String)} overload — NOTE(review): that
     * overload must emit an upsert-capable connector for the PRIMARY KEY
     * clause to be valid; confirm against MyKafkaUtil.
     *
     * @param tableEnv table environment the DDL is executed on
     */
    public static void statLogLhSink(StreamTableEnvironment tableEnv) {
        String topic = "stat_log_liveon_lh";
        String sql = "create table sink_stat_log( " +
                "    `dt` VARCHAR, " +
                "    `week_day` INT, " +
                "    `account_id` INT, " +
                "    `lecture_id` INT, " +
                "    `channel_id` INT, " +
                "    `liveroom_id` INT, " +
                "    `visit_time` TIMESTAMP(3), " +
                "    PRIMARY KEY (`dt`, `week_day`, `account_id`,`lecture_id`) NOT ENFORCED " +
                ") with (" + MyKafkaUtil.getKafkaDDL(topic) + ")";
        tableEnv.executeSql(sql);
    }
}
