package com.doit.day01;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.sql.*;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Exactly-once-style consumer demo.
 *
 * <p>Reads the topic {@code user-info} from Kafka, inserts each record into the
 * MySQL table {@code user_info}, and stores the consumed offset in the table
 * {@code consumer_offset} inside the SAME JDBC transaction — so the data write
 * and the offset commit are atomic. On restart, the consumer seeks each
 * partition to one past the offset saved in MySQL instead of relying on
 * Kafka's {@code __consumer_offsets}.
 */
public class Kafka2Mysql {

    /** Consumer group id; also the prefix of the offset-table key. */
    private static final String GROUP_ID = "G02";
    /** Topic consumed; partitions 0..2 are assigned explicitly. */
    private static final String TOPIC = "user-info";

    public static void main(String[] args) throws SQLException {
        Properties props = new Properties();
        // Required Kafka consumer settings.
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "linux01:9092");
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID);
        // Offsets are committed manually (to MySQL), so Kafka auto-commit is off;
        // AUTO_COMMIT_INTERVAL_MS would be ignored while auto-commit is disabled.
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // try-with-resources: the original leaked the connection, all three
        // statements and the consumer on any failure path.
        try (Connection conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/test01", "root", "123456");
             // Main logic: insert one parsed record into user_info.
             PreparedStatement pps = conn.prepareStatement("insert into user_info values(?,?,?,?)");
             // Upsert the consumed offset keyed by group_topic_partition.
             PreparedStatement updateOffset = conn.prepareStatement("insert into consumer_offset values(?,?) on DUPLICATE key update offset = ?");
             // Look up where this group last stopped on a partition.
             PreparedStatement getOffset = conn.prepareStatement("select offset from consumer_offset where group_topic_partition = ?");
             KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {

            // One JDBC transaction groups the data insert with the offset upsert.
            conn.setAutoCommit(false);

            // Assign partitions explicitly (no group rebalancing) so each one
            // can be seeked to the position stored in MySQL.
            TopicPartition p0 = new TopicPartition(TOPIC, 0);
            TopicPartition p1 = new TopicPartition(TOPIC, 1);
            TopicPartition p2 = new TopicPartition(TOPIC, 2);
            consumer.assign(Arrays.asList(p0, p1, p2));
            for (TopicPartition tp : Arrays.asList(p0, p1, p2)) {
                seekToSavedOffset(consumer, getOffset, tp);
            }

            pollLoop(conn, consumer, pps, updateOffset);
        }
    }

    /**
     * Positions the consumer on {@code tp}: one past the offset saved in
     * MySQL, or the beginning of the partition when nothing was saved yet.
     * (The saved value is the LAST processed offset, hence the {@code + 1}.)
     */
    private static void seekToSavedOffset(KafkaConsumer<String, String> consumer,
                                          PreparedStatement getOffset,
                                          TopicPartition tp) throws SQLException {
        getOffset.setString(1, GROUP_ID + "_" + tp.topic() + "_" + tp.partition());
        // The original never closed its ResultSets; close each one here.
        try (ResultSet rs = getOffset.executeQuery()) {
            if (rs.next()) {
                consumer.seek(tp, rs.getLong(1) + 1);
            } else {
                consumer.seek(tp, 0);
            }
        }
    }

    /**
     * Polls forever; each record is inserted and its offset saved in a single
     * transaction, which is rolled back as a unit when processing fails.
     */
    private static void pollLoop(Connection conn,
                                 KafkaConsumer<String, String> consumer,
                                 PreparedStatement pps,
                                 PreparedStatement updateOffset) throws SQLException {
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(Integer.MAX_VALUE));
            for (ConsumerRecord<String, String> record : records) {
                try {
                    String value = record.value();
                    System.out.println("读取到的值是：" + value);
                    // Expected CSV format: id,name,age,gender — e.g. "1,zss,18,male"
                    String[] arr = value.split(",");
                    pps.setInt(1, Integer.parseInt(arr[0]));
                    pps.setString(2, arr[1]);
                    pps.setInt(3, Integer.parseInt(arr[2]));
                    pps.setString(4, arr[3]);
                    pps.execute();

                    // Deliberate demo failure to exercise the rollback path.
                    if (Integer.parseInt(arr[0]) == 5) {
                        throw new Exception("我自己造的异常，来打我呀");
                    }

                    // Save the offset in the SAME transaction as the insert,
                    // replacing the commit to Kafka's __consumer_offsets.
                    String groupTopicPartition = GROUP_ID + "_" + record.topic() + "_" + record.partition();
                    updateOffset.setString(1, groupTopicPartition);
                    updateOffset.setLong(2, record.offset());
                    updateOffset.setLong(3, record.offset());
                    updateOffset.execute();
                    conn.commit();
                } catch (Exception e) {
                    // Roll back insert + offset together so the record is
                    // re-read safely after a restart (no lost/duplicated row).
                    conn.rollback();
                    System.out.println(e);
                }
            }
        }
    }
}
