package git.soulbgm;

import git.soulbgm.udf.SnowflakeUDF;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.io.IOException;

import static git.soulbgm.SQLUtil.*;

/**
 * Kafka stream job: copies packet-receive-sequence records from a Kafka topic
 * into MySQL, generating a snowflake id for each row.
 *
 * @author SoulBGM
 * @date 2024-09-14
 */
public class KafkaStreamJob extends BaseSQLJob {

    /** Shared MySQL connection settings for every JDBC-backed table in this job. */
    // NOTE(review): credentials are hard-coded in source — move them to external
    // configuration / a secrets store before this goes anywhere near production.
    private static final String JDBC_URL =
            "jdbc:mysql://10.1.7.154:3306/pack_seq?useUnicode=true&allowMultiQueries=true"
                    + "&characterEncoding=utf8&transformedBitIsBoolean=yes"
                    + "&zeroDateTimeBehavior=convertToNull&useSSL=false&serverTimezone=GMT%2B8";

    // Legacy Connector/J 5.x driver class; Connector/J 8+ renamed it to
    // com.mysql.cj.jdbc.Driver — TODO confirm which connector version is on the classpath.
    private static final String JDBC_DRIVER = "com.mysql.jdbc.Driver";
    private static final String JDBC_USER = "root";
    private static final String JDBC_PASSWORD = "db_soul_99G1";

    public static void main(String[] args) throws IOException {
        // Run the job defined by an external SQL script (REST port 10012, parallelism 4).
        SQLUtil.start(10012, 4, "E:/idea_workspace/flink-example/src/main/resources/job_sql/job_format_protostuff.sql");
    }

    /**
     * Wires the pipeline: registers the snowflake-id UDF, declares the Kafka
     * source table and the MySQL sink table, then submits the INSERT that moves
     * data between them.
     *
     * @param tableEnv table environment supplied by {@link BaseSQLJob}
     */
    @Override
    public void handle(StreamTableEnvironment tableEnv) {
        // Register the UDF used below to generate the MySQL primary key.
        tableEnv.executeSql("CREATE TEMPORARY SYSTEM FUNCTION snowflake_id AS '" + SnowflakeUDF.class.getName() + "'");

        // Source: Kafka topic packet_receive_sequence -> dynamic table topic_prs.
        readOdsDb(tableEnv, "flink-test");
        // Sink: MySQL table pack_seq -> dynamic table mysql_prs.
        readJdbcDb(tableEnv, "pack_seq");

        // Continuously copy every Kafka record into MySQL with a generated snowflake id.
        tableEnv.executeSql("INSERT INTO mysql_prs select snowflake_id(sourceCode) AS id, sourceCode as source_code, dataType as data_type, seqNum as seq_num, receiveTime as receive_time from topic_prs");
    }

    /**
     * Creates the dynamic source table {@code topic_prs} mapped to the Kafka
     * topic {@code packet_receive_sequence}, with a 10-second watermark on
     * {@code receiveTime} to bound event-time lateness.
     *
     * @param tableEnv table environment
     * @param groupId  Kafka consumer group id
     */
    public static void readOdsDb(StreamTableEnvironment tableEnv, String groupId) {
        String sql = "CREATE TABLE topic_prs (\n" +
                "  `sourceCode` int,\n" +
                "  `dataType` int,\n" +
                "  `seqNum` bigint,\n" +
                "  `receiveTime` TIMESTAMP(3),\n" +
                // No comma before WATERMARK: the previous line already ends with one.
                "WATERMARK FOR receiveTime AS receiveTime - INTERVAL '10' SECOND" +
                ") " + getKafkaDDL("node1:9092", "packet_receive_sequence", groupId);
        System.out.println(sql);
        tableEnv.executeSql(sql);
    }

    /**
     * Creates the dynamic sink table {@code mysql_prs} mapped to the given
     * MySQL table (one row per received packet).
     *
     * @param tableEnv  table environment
     * @param tableName physical MySQL table name
     */
    public static void readJdbcDb(StreamTableEnvironment tableEnv, String tableName) {
        String sql = "CREATE TABLE mysql_prs (\n" +
                "  `id` bigint,\n" +
                "  `source_code` int,\n" +
                "  `data_type` int,\n" +
                "  `seq_num` bigint,\n" +
                "  `receive_time` TIMESTAMP(3)\n" +
                ") " + getJdbcDDL(JDBC_URL, JDBC_DRIVER, JDBC_USER, JDBC_PASSWORD, tableName);
        System.out.println(sql);
        tableEnv.executeSql(sql);
    }

    /**
     * Creates the dynamic sink table {@code mysql_recv_statistics} mapped to the
     * given MySQL table (windowed per-source/per-type packet counts).
     *
     * @param tableEnv  table environment
     * @param tableName physical MySQL table name
     */
    public static void readStatisticsJdbcDb(StreamTableEnvironment tableEnv, String tableName) {
        String sql = "CREATE TABLE mysql_recv_statistics (\n" +
                "  `id` bigint,\n" +
                "  `source_code` int,\n" +
                "  `data_type` int,\n" +
                "  `pack_num` bigint,\n" +
                "  `statistics_start_time` TIMESTAMP(3),\n" +
                "  `statistics_end_time` TIMESTAMP(3)\n" +
                ") " + getJdbcDDL(JDBC_URL, JDBC_DRIVER, JDBC_USER, JDBC_PASSWORD, tableName);
        System.out.println(sql);
        tableEnv.executeSql(sql);
    }

}
