package com.demo.kafka.dongli.producer;



import com.demo.kafka.dongli.model.User;
import com.demo.kafka.dongli.util.JSONUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.kafka.support.SendResult;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import javax.annotation.Resource;
import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

@RestController
@RequestMapping("dongli")
@Api(tags = "动力节点demo")
public class EventProducer {

    // With the spring-kafka dependency plus the .yml configuration, Spring Boot
    // auto-configures Kafka and registers the KafkaTemplate beans injected below.
    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    // Template associated with the custom partitioner bean ("customerPartitioner").
    @Resource
    @Qualifier("customerPartitioner")
    private KafkaTemplate<String, String> kafkaTemplate4;

    // Template whose value serializer accepts arbitrary objects (entity payloads).
    @Resource
    private KafkaTemplate<String, Object> kafkaTemplate2;

    // Template with Object key and value types.
    @Resource
    private KafkaTemplate<Object, Object> kafkaTemplate3;

    @ApiOperation("简单消息")
    @GetMapping("sendEvent")
    public void sendEvent() {
        // Fire-and-forget send of a plain string message.
        kafkaTemplate.send("hello-topic", "hello kafka");
    }

    @ApiOperation("通过构建器模式创建Message对象")
    @GetMapping("sendEvent2")
    public void sendEvent2() {
        // Build a Message via the builder pattern; the destination topic is
        // carried in a header rather than passed to send().
        Message<String> message = MessageBuilder.withPayload("hello kafka")
                .setHeader(KafkaHeaders.TOPIC, "test-topic-02") // topic name goes in the header
                .build();
        kafkaTemplate.send(message);
    }

    @ApiOperation("通过构建器模式创建Message批量对象")
    @GetMapping("sendEventBatch")
    public void sendEventBatch() {
        // Send ten builder-constructed messages in a loop.
        for (int i = 0; i < 10; i++) {
            Message<String> message = MessageBuilder.withPayload("hello kafka" + i + ":次消息+++++\n")
                    .setHeader(KafkaHeaders.TOPIC, "test-topic-02") // topic name goes in the header
                    .build();
            kafkaTemplate.send(message);
        }
    }

    @ApiOperation("Headers里面放的信息")
    @GetMapping("sendEvent3")
    public void sendEvent3() {
        // Headers hold key-value metadata that consumers can read alongside the payload.
        Headers headers = new RecordHeaders();
        headers.add("phone", "13709090909".getBytes(StandardCharsets.UTF_8));
        headers.add("orderId", "OD158932723742".getBytes(StandardCharsets.UTF_8));

        // ProducerRecord(String topic, Integer partition, Long timestamp, K key, V value, Iterable<Header> headers)
        ProducerRecord<String, String> record = new ProducerRecord<>(
                "test-topic-Headers",
                0,
                System.currentTimeMillis(),
                "k1",
                "hello kafka",
                headers
        );
        kafkaTemplate.send(record);
    }

    @ApiOperation("分区时间ke")
    @GetMapping("sendEvent4")
    public void sendEvent4() {
        // send(String topic, Integer partition, Long timestamp, K key, V data)
        kafkaTemplate.send("test-topic-partition", 0, System.currentTimeMillis(), "k2", "hello kafka");
    }

    @ApiOperation("不指定topic 使用默认")
    @GetMapping("sendEvent5")
    public void sendEvent5() {
        // sendDefault(Integer partition, Long timestamp, K key, V data) —
        // publishes to the default topic configured on the template.
        kafkaTemplate.sendDefault(0, System.currentTimeMillis(), "k3", "hello kafka");
    }

    @ApiOperation("获取异步结果")
    @GetMapping("sendEvent6")
    public void sendEvent6() {
        // Spring Kafka 3.x sendDefault() already returns a CompletableFuture,
        // so the previous explicit cast was redundant and has been removed.
        CompletableFuture<SendResult<String, String>> completableFuture =
                kafkaTemplate.sendDefault(0, System.currentTimeMillis(), "k3", "hello kafka");

        try {
            // 1. Blocking style: wait until the broker acknowledges (or the send fails).
            SendResult<String, String> sendResult = completableFuture.get();
            if (sendResult.getRecordMetadata() != null) {
                // Non-null metadata means the Kafka broker confirmed receipt.
                System.out.println("消息发送成功: " + sendResult.getRecordMetadata().toString());
            }
            System.out.println("producerRecord: " + sendResult.getProducerRecord());
        } catch (InterruptedException e) {
            // Restore the interrupt flag before propagating, per Java conventions.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        }
    }

    @ApiOperation("非阻塞的方式拿结果")
    @GetMapping("sendEvent7")
    public void sendEvent7() {
        // Spring Kafka 3.x sendDefault() already returns a CompletableFuture (no cast needed).
        CompletableFuture<SendResult<String, String>> completableFuture =
                kafkaTemplate.sendDefault(0, System.currentTimeMillis(), "k3", "hello kafka");

        // 2. Non-blocking style: register callbacks instead of waiting on get().
        // The former surrounding try/catch was removed — registering callbacks
        // throws no checked exception; failures are delivered to exceptionally().
        completableFuture.thenAccept(sendResult -> {
            if (sendResult.getRecordMetadata() != null) {
                // Non-null metadata means the Kafka broker confirmed receipt.
                System.out.println("消息发送成功: " + sendResult.getRecordMetadata().toString());
            }
            System.out.println("producerRecord: " + sendResult.getProducerRecord());
        }).exceptionally(t -> {
            // Failure path. NOTE(review): prefer an SLF4J logger over printStackTrace.
            t.printStackTrace();
            return null;
        });
    }

    @ApiOperation("封装实体")
    @GetMapping("sendEvent8")
    public void sendEvent8() {
        User user = User.builder().id(1208).phone("13709090909").birthDay(new Date()).build();
        // Partition is null: let Kafka decide which partition receives the message.
        kafkaTemplate3.sendDefault(null, System.currentTimeMillis(), "k3", user);
    }

    @ApiOperation("分区是null，让kafka自己去决定把消息发到哪个分区")
    @GetMapping("sendEvent9")
    public void sendEvent9() {
        User user = User.builder().id(1208).phone("13709090909").birthDay(new Date()).build();
        // Partition is null: let Kafka decide which partition receives the message.
        kafkaTemplate2.send("hello-topic-entity", null, System.currentTimeMillis(), "k9", user);
    }

    @ApiOperation("分区是null，让kafka自己去决定把消息发到哪个分区，topic不为空")
    @GetMapping("sendEvent10")
    public void sendEvent10() {
        User user = User.builder().id(1208).phone("13709090909").birthDay(new Date()).build();
        // Partition is unspecified: Kafka picks one. (Dead commented-out code that
        // hand-serialized the entity to JSON was removed; sendEvent12 shows that path.)
        kafkaTemplate2.send("hello-topic-entity", user);
    }

    @ApiOperation("自定义分区策略")
    @GetMapping("sendEvent11")
    public void sendEvent11() {
        // send(String topic, Integer partition, Long timestamp, K key, V data) —
        // partition is null so the custom partitioner chooses the target partition.
        kafkaTemplate4.send("myTopic", null, System.currentTimeMillis(), "k11", "hello kafka");
    }

    @ApiOperation("批量发送消息")
    @GetMapping("sendEvent12")
    public void sendEvent12() {
        // Send 25 JSON-serialized User entities, each with a distinct key.
        for (int i = 0; i < 25; i++) {
            User user = User.builder().id(i).phone("1370909090" + i).birthDay(new Date()).build();
            String userJSON = JSONUtils.toJSON(user);
            kafkaTemplate.send("batch-topic", "k" + i, userJSON);
        }
    }

}
