package com.kafka.dongli.producer;

import com.kafka.dongli.model.User;
import com.kafka.dongli.util.JSONUtils;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.kafka.support.SendResult;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import javax.annotation.Resource;
import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.concurrent.CompletableFuture;

/**
 * @ClassName: ProducerController
 * @Description: REST endpoints demonstrating various Kafka producer send patterns
 *               (simple send, headers, explicit partitions, async results, callbacks,
 *               domain objects, batching, forwarding, custom partitioner).
 * @Author: 橙哈哈
 * @Date: 2025-09-19 23:45
 **/
@RestController
@RequestMapping("dongli")
@Tag(name = "生产者发送消息")
public class ProducerController {

    /** Template for plain String key/value messages; also used where a default topic is configured. */
    @Resource
    KafkaTemplate<String, String> kafkaTemplate;

    /** Template whose value serializer handles domain objects (e.g. JSON-serialized {@link User}). */
    @Resource
    KafkaTemplate<String, Object> kafkaTemplateDomain;

    /** Template wired to the custom partitioner bean. */
    @Resource
    @Qualifier("customerPartitioner")
    KafkaTemplate<String, String> customerKafkaTemplate;

    /**
     * Fire-and-forget send of a plain string to "simple-topic".
     */
    @GetMapping("simple")
    @Operation(summary = "简单发送")
    public String simple() {
        kafkaTemplate.send("simple-topic", "I is simple");
        return "发送成功";
    }

    /**
     * Builds a Spring Messaging {@link Message} via the builder pattern; the target
     * topic is carried in the message header rather than passed to send().
     */
    @Operation(summary = "通过构建器模式创建Message对象")
    @GetMapping("sendEvent2")
    public void sendEvent2() {
        Message<String> message = MessageBuilder.withPayload(" message domain")
                // the destination topic travels in the KafkaHeaders.TOPIC header
                .setHeader(KafkaHeaders.TOPIC, "message-topic")
                .build();
        kafkaTemplate.send(message);
    }

    /**
     * Sends a record carrying custom headers (key-value pairs) that consumers can
     * read alongside the payload.
     */
    @Operation(summary = "Headers里面放的信息")
    @GetMapping("sendEvent3")
    public void sendEvent3() {
        // Headers hold key/byte[] metadata the consumer can retrieve with the record.
        Headers headers = new RecordHeaders();
        headers.add("phone", "13709090909".getBytes(StandardCharsets.UTF_8));
        headers.add("orderId", "OD158932723742".getBytes(StandardCharsets.UTF_8));

        // ProducerRecord(String topic, Integer partition, Long timestamp, K key, V value, Iterable<Header> headers)
        ProducerRecord<String, String> record = new ProducerRecord<>(
                "test-topic-Headers",
                0,
                System.currentTimeMillis(),
                "k1",
                "hello kafka",
                headers
        );
        kafkaTemplate.send(record);
    }

    /**
     * Sends a message to an explicit partition (0) of "partition-topic".
     */
    @Operation(summary = "发送消息指定分区")
    @GetMapping("sendPartition")
    public String sendPartition() {
        // send(String topic, Integer partition, Long timestamp, K key, V data)
        kafkaTemplate.send("partition-topic", 0, System.currentTimeMillis(), "", "我是指定分区消息:1");
        return "发送成功";
    }

    /**
     * Sends one message to each of partitions 0..2 of "partition-topic".
     */
    @Operation(summary = "发送消息指定分区01")
    @GetMapping("sendPartition01")
    public String sendPartition01() {
        // send(String topic, Integer partition, Long timestamp, K key, V data)
        for (int i = 0; i < 3; i++) {
            kafkaTemplate.send("partition-topic", i, System.currentTimeMillis(), "", "我是指定分区消息:" + i + "随机消息");
        }
        return "发送成功";
    }

    /**
     * Sends a message to explicit partition 2 of "partition-topic".
     */
    @Operation(summary = "发送消息指定分区02")
    @GetMapping("sendPartition02")
    public String sendPartition02() {
        // send(String topic, Integer partition, Long timestamp, K key, V data)
        kafkaTemplate.send("partition-topic", 2, System.currentTimeMillis(), "", "我是指定分区消息:2");
        return "发送成功";
    }

    /**
     * Sends to the template's configured default topic (no topic argument).
     */
    @Operation(summary = "不指定topic,使用默认")
    @GetMapping("sendDefaultTopic")
    public void sendDefaultTopic() {
        // sendDefault(Integer partition, Long timestamp, K key, V data)
        kafkaTemplate.sendDefault(0, System.currentTimeMillis(), "k3", "我是默认主题消息");
    }

    /**
     * Demonstrates retrieving the send result by blocking on the returned future.
     */
    @Operation(summary = "获取异步结果")
    @GetMapping("sendEvent6")
    public void sendEvent6() {
        // Spring Kafka 3.x sendDefault() already returns CompletableFuture — no cast needed.
        CompletableFuture<SendResult<String, String>> completableFuture =
                kafkaTemplate.sendDefault(0, System.currentTimeMillis(), "k3", "hello kafka");

        try {
            // 1. Blocking wait for the broker's acknowledgement.
            SendResult<String, String> sendResult = completableFuture.get();
            if (sendResult.getRecordMetadata() != null) {
                // Non-null metadata means the Kafka broker confirmed receipt.
                System.out.println("消息发送成功: " + sendResult.getRecordMetadata().toString());
            }
            System.out.println("producerRecord: " + sendResult.getProducerRecord());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Demonstrates retrieving the send result non-blockingly via callbacks.
     */
    @Operation(summary = "非阻塞的方式拿结果")
    @GetMapping("sendEvent7")
    public void sendEvent7() {
        // Spring Kafka 3.x sendDefault() already returns CompletableFuture — no cast needed.
        CompletableFuture<SendResult<String, String>> completableFuture =
                kafkaTemplate.sendDefault(0, System.currentTimeMillis(), "k3", "hello kafka");

        try {
            // 2. Non-blocking: react to the result in callbacks.
            completableFuture.thenAccept((sendResult) -> {
                if (sendResult.getRecordMetadata() != null) {
                    // Non-null metadata means the Kafka broker confirmed receipt.
                    System.out.println("消息发送成功: " + sendResult.getRecordMetadata().toString());
                }
                System.out.println("producerRecord: " + sendResult.getProducerRecord());
            }).exceptionally((t) -> {
                t.printStackTrace();
                // handle the failure here
                return null;
            });
        } catch (Exception e) {
            // Guards synchronous failures from sendDefault() itself.
            throw new RuntimeException(e);
        }
    }

    /**
     * Sends a domain object directly; relies on the template's object value serializer.
     */
    @Operation(summary = "发送实体对象消息")
    @GetMapping("sendDomain")
    public void sendDomain() {
        User user = User.builder().id(1208).phone("13709090909").birthDay(new Date()).build();
        kafkaTemplateDomain.send("domain-topic", 0, System.currentTimeMillis(), "k3", user);
    }

    /**
     * Serializes the domain object to JSON manually and sends it as a String.
     */
    @Operation(summary = "发送实体对象消息-json")
    @GetMapping("sendDomainJson")
    public void sendDomainJson() {
        User user = User.builder().id(1208).phone("13709090909").birthDay(new Date()).build();
        // Explicitly targets partition 1 of "domain-topic".
        kafkaTemplate.send("domain-topic", 1, System.currentTimeMillis(), "k3", JSONUtils.toJSON(user));
    }

    /**
     * Sends to "callBack-topic"; the producer's callback/listener handles the result.
     */
    @Operation(summary = "回调机制")
    @GetMapping("sendCallBack")
    public void sendCallBack() {
        // Explicitly targets partition 0 of "callBack-topic".
        kafkaTemplate.send("callBack-topic", 0, System.currentTimeMillis(), "k3", "我成功到达服务器");
    }

    /**
     * Sends to "ack-topic" for a consumer that acknowledges manually.
     */
    @Operation(summary = "回调机制")
    @GetMapping("sendHandleAck")
    public void sendHandleAck() {
        // Explicitly targets partition 0 of "ack-topic".
        kafkaTemplate.send("ack-topic", 0, System.currentTimeMillis(), "k3", "我是一条手动确认消息");
    }

    /**
     * Sends 125 JSON user messages for the batch-consumption demo.
     */
    @Operation(summary = "批量发送与消费")
    @GetMapping("sendBatch")
    public void sendEvent() {
        for (int i = 0; i < 125; i++) {
            User user = User.builder().id(i).phone("1370909090" + i).birthDay(new Date()).build();
            String userJSON = JSONUtils.toJSON(user);
            kafkaTemplate.send("batchTopic", "k" + i, userJSON);
        }
    }

    /**
     * Sends a message to "turn-topic-a"; a listener forwards it onward.
     * FIX: this was mapped to "sendBatch", duplicating sendEvent()'s mapping —
     * Spring MVC rejects ambiguous mappings at startup ("Ambiguous mapping" error).
     */
    @Operation(summary = "消息转发")
    @GetMapping("sendTurn")
    public void sendTurn() {
        kafkaTemplate.send("turn-topic-a", 0, null, "消息转发");
    }

    /**
     * Demonstrates the template configured with a custom partitioner.
     */
    @Operation(summary = "自定义分区策略")
    @GetMapping("sendCustomer")
    public void sendCustomer() {
        // send(String topic, Integer partition, K key, V data)
        // NOTE(review): passing an explicit partition (0) bypasses the custom
        // partitioner — omit the partition argument to let it decide; kept as-is.
        customerKafkaTemplate.send("customer-partation-topic", 0, "k11", "自定义分区策略消息");
    }

    /**
     * Sends a message for the consumer-interceptor demo.
     */
    @Operation(summary = "消费者拦截器")
    @GetMapping("sendConsumer")
    public void sendConsumer() {
        kafkaTemplate.send("consumer-inter-topic", 0, null, "消费者拦截器消息");
    }

}
