package cn.jly.flink.source2sink.mysql;

import cn.jly.flink.entity.Person;
import cn.jly.flink.utils.BusinessException;
import cn.jly.flink.utils.ConstantUtils;
import cn.jly.flink.utils.FlinkUtils;
import com.alibaba.fastjson.JSON;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Properties;

/**
 * Reads JSON-encoded {@code Person} records from Kafka and writes them into MySQL
 * via a custom JDBC sink.
 *
 * @PackageName cn.jly.flink.source2sink.custom
 * @ClassName SinkToMysqlDemo
 * @Description MySQL sink demo
 * @Author 姬岚洋
 * @Date 2021/1/15 10:41 AM
 */
public class SinkToMysqlDemo {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = FlinkUtils.getStreamExecutionEnv();

        final Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, ConstantUtils.Kafka.BOOTSTRAP_SERVERS_VALUE);
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test");
        // NOTE(review): Flink's Kafka connector installs its own byte-array deserializers and
        // deserializes via SimpleStringSchema below, so these two settings are effectively inert.
        properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // This is overridden by setStartFromEarliest() below; kept only as the connector's
        // fallback default if the explicit start position were ever removed.
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        final FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(
                "person_test",
                new SimpleStringSchema(),
                properties
        );
        // Start consuming from the earliest available offset (takes precedence over
        // the auto.offset.reset property set above).
        kafkaConsumer.setStartFromEarliest();

        env.addSource(kafkaConsumer)
                // Anonymous class (not a lambda) so Flink can extract the Person output type
                // despite generic-type erasure.
                .flatMap(new FlatMapFunction<String, Person>() {
                    @Override
                    public void flatMap(String s, Collector<Person> collector) throws Exception {
                        if (StringUtils.isNotEmpty(s)) {
                            try {
                                final Person person = JSON.parseObject(s, Person.class);
                                // parseObject returns null for inputs like "null"; emit only real records.
                                if (person != null) {
                                    collector.collect(person);
                                }
                            } catch (RuntimeException ignored) {
                                // A single malformed record must not fail the whole streaming job;
                                // skip it and keep consuming.
                            }
                        }
                    }
                })
                .addSink(new MysqlSink());

        FlinkUtils.executeStream(env, "SinkToMysqlDemo");
    }

    /**
     * JDBC sink that inserts each incoming {@link Person} into {@code tbl_person}.
     * One connection and one prepared statement per parallel sink instance, opened
     * once in {@link #open} and released in {@link #close}.
     */
    static class MysqlSink extends RichSinkFunction<Person> {
        Connection connection = null;
        PreparedStatement preparedStatement = null;

        /**
         * Acquires the JDBC connection and prepares the parameterized INSERT once
         * per task instance.
         *
         * @throws BusinessException if a database connection cannot be obtained
         */
        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            connection = ConstantUtils.Mysql.getConnection();
            if (connection == null) {
                throw new BusinessException("无法成功获取数据库连接");
            }
            // Parameterized statement: values are bound with '?', never concatenated.
            String sql = "insert into tbl_person(last_name,email,gender,age) values(?,?,?,?)";
            preparedStatement = connection.prepareStatement(sql);
        }

        /**
         * Invoked once per record: binds the person's fields and executes the INSERT.
         *
         * @param person  record to persist; null records are ignored
         * @param context sink context (unused)
         * @throws Exception if the JDBC insert fails
         */
        @Override
        public void invoke(Person person, Context context) throws Exception {
            if (person == null) {
                return;
            }
            // NOTE(review): setInt unboxes — if Person.getGender()/getAge() return a null
            // Integer this NPEs; confirm the Person fields are primitives or non-null.
            preparedStatement.setString(1, person.getLastName());
            preparedStatement.setString(2, person.getEmail());
            preparedStatement.setInt(3, person.getGender());
            preparedStatement.setInt(4, person.getAge());
            preparedStatement.executeUpdate();
        }

        /** Releases the prepared statement and connection when the task shuts down. */
        @Override
        public void close() throws Exception {
            super.close();
            ConstantUtils.Mysql.release(preparedStatement, connection);
        }
    }
}
