package cn.zd.demo.flink.kafka2db;

import cn.zd.demo.flink.kafka2db.dto.KafkaMsgDto;
import cn.zd.demo.flink.kafka2db.mapfunction.JsonMapFunction;
import cn.zd.demo.flink.kafka2db.sink.MysqlSink;
import com.alibaba.fastjson2.JSON;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions.JdbcConnectionOptionsBuilder;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Date;
import java.sql.Timestamp;
import java.util.Objects;
import java.util.Properties;

/**
 * Flink streaming job that consumes JSON messages from a Kafka topic, parses each
 * record into a {@link KafkaMsgDto} and writes it to a MySQL-compatible database
 * (here an OceanBase endpoint) through Flink's JDBC sink.
 */
public class Kafka2MySQL {
    private static final Logger log = LoggerFactory.getLogger(Kafka2MySQL.class);
    private static final String DB_DRIVERCLASS = "com.mysql.cj.jdbc.Driver";
    // NOTE(review): endpoint and credentials are hard-coded; externalize to args or
    // environment configuration before deploying beyond local testing.
    private static final String DB_URL="jdbc:mysql://192.168.58.22:2881/kafka2ob?autoReconnect=true&useUnicode=true&characterEncoding=utf8&serverTimezone=GMT%2B8&useSSL=false";
    private static final String DB_USERNAME = "ob";
    private static final String DB_USERPWD = "ob";

    /**
     * Entry point.
     *
     * @param args either {@code topic groupId} (defaults to localhost Kafka/ZK) or
     *             {@code kafkaServer zkServer topic groupId}
     * @throws IllegalArgumentException when the argument count is neither 2 nor 4
     * @throws Exception                if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        String kafkaServer = "localhost:9092";
        String zkServer = "localhost:2181";
        final String topic;
        final String groupId;
        if (args.length == 2) {
            topic = args[0];
            groupId = args[1];
        } else if (args.length == 4) {
            kafkaServer = args[0];
            zkServer = args[1];
            topic = args[2];
            groupId = args[3];
        } else {
            // FIX: IllegalArgumentException (still a RuntimeException) is the
            // conventional type for bad program arguments.
            throw new IllegalArgumentException("请依次传入kafka、zk地址及topic、groupid");
        }

        // Local environment with web UI; switch to getExecutionEnvironment() for cluster runs.
        final StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        // Checkpoint every 5 seconds with exactly-once semantics.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        // FIX: the original pattern was missing the "{}" placeholder for groupId,
        // silently dropping the fourth argument.
        log.info("开始初始化配置,KAFKA:{},ZK:{},TOPIC:{},GROUPID:{}", kafkaServer, zkServer, topic, groupId);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", kafkaServer);
        properties.setProperty("zookeeper.connect", zkServer);
        properties.setProperty("group.id", groupId);

        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), properties);
        // FIX: the original called setStartFromLatest() and then immediately
        // setStartFromGroupOffsets(); the second call overrides the first, so only
        // the effective start mode is kept here (resume from committed group offsets).
        consumer.setStartFromGroupOffsets();

        final String finalTopic = topic;
        DataStream<KafkaMsgDto> dataStream = env.addSource(consumer)
                .map((MapFunction<String, KafkaMsgDto>) val -> {
                    log.debug("开始处理消息：{}", val);
                    try {
                        KafkaMsgDto dto = JSON.parseObject(val, KafkaMsgDto.class);
                        dto.setTopic(finalTopic);
                        return dto;
                    } catch (Exception e) {
                        // Best-effort: malformed messages are logged and dropped
                        // (nulls are removed by the filter below).
                        log.warn("消息转换失败！{}", val, e);
                        return null;
                    }
                })
                .filter(Objects::nonNull);

        // FIX: (1) sink is now typed (the original used a raw SinkFunction plus an
        // unchecked cast); (2) the statement builder no longer calls
        // statement.execute() — JdbcSink executes/batches the prepared statement
        // itself, so the original inserted every row twice; (3) username and
        // password were swapped in the builder; (4) setTimestamp keeps the
        // time-of-day for the IN_DATETIME column (java.sql.Date truncates it).
        SinkFunction<KafkaMsgDto> jdbcSink = JdbcSink.sink(
                "INSERT INTO tab_data1 (TOPIC, IN_DATETIME,CTEXT) VALUES (?, ?, ?)",
                (statement, value) -> {
                    log.debug("开始存储数据到mysql：{}", value.getCtext());
                    statement.setString(1, value.getTopic());
                    statement.setTimestamp(2, new Timestamp(System.currentTimeMillis()));
                    statement.setString(3, value.getCtext());
                },
                new JdbcConnectionOptionsBuilder()
                        .withUrl(DB_URL)
                        .withUsername(DB_USERNAME)
                        .withPassword(DB_USERPWD)
                        .withDriverName(DB_DRIVERCLASS)
                        .build());
        dataStream.addSink(jdbcSink);

        log.info("开始任务Kafka2MySQL");
        env.execute("Kafka2MySQL");
    }
}
