package com.learn.util;



import com.learn.commmon.Constant;
import com.learn.serialier.CustomDeSerializationSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.util.Properties;
import java.util.regex.Pattern;

/**
 * Factory methods for Flink sources used by the streaming jobs:
 * Kafka consumers (fixed topic, topic pattern, and topic pattern reading
 * from the earliest offset) and JDBC lookup tables registered via Flink SQL.
 */
public class FlinkSourceUtil {

    /**
     * Interval in milliseconds at which Flink re-scans Kafka for new
     * partitions/topics matching the subscribed pattern.
     * NOTE(review): 10 ms is extremely aggressive — each scan is a metadata
     * request to the brokers; an interval of seconds/minutes is typical. Confirm
     * this value is intentional before changing it.
     */
    private static final String PARTITION_DISCOVERY_INTERVAL_MS = "10";

    /** Utility class — not instantiable. */
    private FlinkSourceUtil() {
    }

    /**
     * Builds the Kafka consumer properties shared by all sources in this class.
     *
     * @param groupId Kafka consumer group id
     * @return properties with brokers, group id, read-committed isolation and
     *         latest auto-offset-reset configured
     */
    private static Properties buildConsumerProps(String groupId) {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", Constant.KAFKA_BROKERS);
        // Only read records from committed transactions (pairs with an
        // exactly-once/transactional producer upstream).
        props.setProperty("isolation.level", "read_committed");
        props.setProperty("group.id", groupId);
        props.setProperty("auto.offset.reset", "latest");
        return props;
    }

    /**
     * Creates a Kafka consumer subscribed to a single, fixed topic.
     *
     * @param groupId Kafka consumer group id
     * @param topic   exact topic name to consume
     * @return consumer emitting raw {@code ConsumerRecord<String, String>} values
     */
    public static FlinkKafkaConsumer<ConsumerRecord<String, String>> getKafkaSource(String groupId, String topic) {
        return new FlinkKafkaConsumer<>(
                topic,
                new CustomDeSerializationSchema(),
                buildConsumerProps(groupId)
        );
    }

    /**
     * Registers the {@code vec_energy_syscode_record} JDBC lookup table
     * (vehicle rechargeable-energy-system code records) in the given table
     * environment. Lookup results are cached (max 10 rows, 1 hour TTL).
     *
     * Backing MySQL table:
     * CREATE TABLE `vec_energy_syscode_record` (
     *   `id` int NOT NULL AUTO_INCREMENT COMMENT 'auto-increment id',
     *   `vin` varchar(36) DEFAULT NULL COMMENT 'vehicle VIN',
     *   `energy_syscode` varchar(30) DEFAULT NULL COMMENT 'rechargeable energy system code',
     *   `find_time` datetime DEFAULT NULL COMMENT 'platform discovery time',
     *   `create_time` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP COMMENT 'creation time',
     *   PRIMARY KEY (`id`)
     * )
     *
     * SECURITY(review): the JDBC URL, username and password are hardcoded in
     * source. Move them to configuration (e.g. alongside Constant.KAFKA_BROKERS)
     * and keep the password out of version control.
     *
     * @param tEnv table environment to register the table in
     */
    private static void createEnergySyscodeRecordTable(StreamTableEnvironment tEnv) {
        tEnv.executeSql("create table vec_energy_syscode_record(" +
                "id int, " +
                "vin string," +
                "energy_syscode string," +
                "find_time Timestamp," +
                "create_time Timestamp" +
                ")with(" +
                " 'connector'='jdbc',  " +
                " 'url' = 'jdbc:mysql://10.0.10.7:3306/gtb-big-service?useSSL=false'," +
                " 'table-name' = 'vec_energy_syscode_record', " +
                " 'username' = 'root',  " +
                " 'password' = 'Juwan@123'," +
                " 'lookup.cache.max-rows' = '10', " +
                " 'lookup.cache.ttl' = '1 hour' " +
                ")");
    }

    /**
     * Registers the VIN / battery (energy system code) lookup table.
     *
     * @param tEnv table environment to register the table in
     */
    public static void readVinAndBatteryInfo(StreamTableEnvironment tEnv) {
        createEnergySyscodeRecordTable(tEnv);
    }

    /**
     * Registers the battery area info lookup table.
     *
     * NOTE(review): this was byte-identical to {@link #readVinAndBatteryInfo}
     * — it registers the SAME table name, so calling both on one environment
     * fails with "table already exists". The method name suggests a different
     * table was intended; confirm against the callers and point this at the
     * correct DDL.
     *
     * @param tEnv table environment to register the table in
     */
    public static void readBatteryAreaInfo(StreamTableEnvironment tEnv) {
        createEnergySyscodeRecordTable(tEnv);
    }

    /**
     * Creates a Kafka consumer subscribed to every topic matching the given
     * regular expression, with dynamic partition/topic discovery enabled so
     * topics created after job start are picked up automatically.
     *
     * @param groupId Kafka consumer group id
     * @param topic   regular expression matching the topics to consume
     * @return consumer emitting raw {@code ConsumerRecord<String, String>} values
     */
    public static FlinkKafkaConsumer<ConsumerRecord<String, String>> getKafkaSourceTopic(String groupId, String topic) {
        Properties props = buildConsumerProps(groupId);
        // Enable Flink's periodic discovery of new partitions/topics.
        props.setProperty(FlinkKafkaConsumerBase.KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS,
                PARTITION_DISCOVERY_INTERVAL_MS);

        return new FlinkKafkaConsumer<>(
                Pattern.compile(topic),
                new CustomDeSerializationSchema(),
                props
        );
    }

    /**
     * Same as {@link #getKafkaSourceTopic(String, String)}, but starts reading
     * from the earliest available offset (full history replay) instead of the
     * committed/latest position.
     *
     * @param groupId Kafka consumer group id
     * @param topic   regular expression matching the topics to consume
     * @return consumer positioned at the earliest offsets
     */
    public static FlinkKafkaConsumer<ConsumerRecord<String, String>> getKafkaSourceTopicHistory(String groupId, String topic) {
        Properties props = buildConsumerProps(groupId);
        // Enable Flink's periodic discovery of new partitions/topics.
        props.setProperty(FlinkKafkaConsumerBase.KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS,
                PARTITION_DISCOVERY_INTERVAL_MS);

        FlinkKafkaConsumer<ConsumerRecord<String, String>> flinkKafkaConsumer = new FlinkKafkaConsumer<>(
                Pattern.compile(topic),
                new CustomDeSerializationSchema(),
                props
        );

        // Replay from the beginning of each partition, ignoring committed offsets.
        flinkKafkaConsumer.setStartFromEarliest();

        return flinkKafkaConsumer;
    }

}
