package com.tgy.twodatabaseplus.kafka;

import com.mysql.cj.util.StringUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.Calendar;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

/**
 * Kafka producer demos: a synchronous send, an asynchronous send with a
 * callback, and a batched asynchronous send awaited via a CountDownLatch.
 *
 * @author 送你一块二向箔
 * @version V1.0
 * @since 2022/4/2 10:13
 */

@Slf4j
@Component
public class TgyProducer {
    @Value("${kafka.topicName}")
    private String TOPIC_NAME;
    @Value("${kafka.msgValue}")
    private String msgValue;

    public static void main(String[] args) {
        async5();


        //格式化时间
        LocalDateTime now = LocalDateTime.now();
        DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("时间是:yyyy-MM-dd HH:mm:ss");
        System.out.println(now.format(dateTimeFormatter));

        //字符串转换位时间格式
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
        LocalDateTime lt = LocalDateTime.parse("2018-12-30T19:34:50.63",formatter);
        System.out.println(lt);
        async5();

    }

    //同步发送
    public void syncSend() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "47.101.193.71:9092");
//把发送的key从字符串序列化为字节数组
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
//把发送消息value从字符串序列化为字节数组
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        Producer<String, String> producer = new KafkaProducer<String, String>(props);

        //key,作用是决定了往那个分区发送，value具体发送的消息内容  可以指定分区 //未指定发送分区，具体发送的分区计算公式：hash(key)%partitionNum
        ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>(TOPIC_NAME, "key", msgValue);
        try {
            RecordMetadata metadata = null;
            metadata = producer.send(producerRecord).get();
            log.info("同步方式发送消息结果：" + "topic-" + metadata.topic() + "|partition-" + metadata.partition() + "|offset-" + metadata.offset());
        } catch (InterruptedException | ExecutionException e) {
            log.error("发送失败");
            e.printStackTrace();
        }
        producer.close();
    }

    //异步发送
    public static void asyncSend() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092,10.31.167.10:9093,10.31.167.10:9094");
//把发送的key从字符串序列化为字节数组
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
//把发送消息value从字符串序列化为字节数组
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        Producer<String, String> producer = new KafkaProducer<String, String>(props);

        //key,作用是决定了往那个topic发送，value具体发送的消息内容  可以指定分区 //未指定发送分区，具体发送的分区计算公式：hash(key)%partitionNum
        ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>("yaml", "key", "msg");

        producer.send(producerRecord, new Callback() {
            public void onCompletion(RecordMetadata metadata, Exception exception) {
                if (exception != null) {
                    System.err.println("异步发送消息失败：" +
                            exception.getStackTrace());
                }
                if (metadata != null) {
                    System.out.println("异步方式发送消息结果：" + "topic-" + metadata.topic() + "|partition-" + metadata.partition() + "|offset-" + metadata.offset());
                }
            }
        });
        producer.close();
    }


    public static void async5() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "47.101.193.71:9092");
        //把发送的key从字符串序列化为字节数组
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        //把发送消息value从字符串序列化为字节数组
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        Producer<String, String> producer = new KafkaProducer<String, String>(props);
        CountDownLatch countDownLatch = new CountDownLatch(5);
        SimpleDateFormat time = new SimpleDateFormat("时间是HH:mm:sss");


        for (int i = 0; i < 5; i++) {
            Calendar calendar = Calendar.getInstance();
            //key,作用是决定了往那个topic发送，value具体发送的消息内容  可以指定分区 //未指定发送分区，具体发送的分区计算公式：hash(key)%partitionNum
            ProducerRecord<String, String> producerRecord = new ProducerRecord<String, String>("yaml", "" + i, time.format(calendar.getTime()));

            producer.send(producerRecord, new Callback() {
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    if (exception != null) {
                        System.err.println("异步发送消息失败：" +
                                exception.getStackTrace());
                    }
                    if (metadata != null) {
                        System.out.println("异步方式发送消息结果：" + "topic-" + metadata.topic() + "|partition-" + metadata.partition() + "|offset-" + metadata.offset());
                    }
                    countDownLatch.countDown();
                }
            });
        }
        try {
            countDownLatch.await(5, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        producer.close();


    }
}
