package com.mojo.kafka.controller;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.mojo.kafka.bean.User;
import com.mojo.response.ResultVO;
import org.apache.commons.logging.Log;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.KafkaListeners;
import org.springframework.kafka.annotation.RetryableTopic;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.kafka.core.KafkaOperations;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.KafkaUtils;
import org.springframework.kafka.support.SendResult;
import org.springframework.retry.annotation.Backoff;
import org.springframework.util.concurrent.ListenableFutureCallback;
import org.springframework.web.bind.annotation.*;

import javax.annotation.Resource;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Kafka demo endpoints: plain sends, callback sends, transactional sends,
 * partition-assigned batch listeners, and retry / dead-letter-topic consumption.
 *
 * @author <a href="mailto:sjj@jianzhimao.com">mojo</a>
 * copyright (C), 2013-2023, 广州九尾信息科技有限公司
 * @date 2023/5/8 16:09
 */
@RestController
@RequestMapping("/kafka")
public class KafkaController {
    private static final Logger LOG = LoggerFactory.getLogger(KafkaController.class);
    private static final String TOPIC = "mojo-topic";

    /**
     * ObjectMapper is thread-safe and relatively expensive to construct; share one
     * instance instead of allocating a new mapper per request / per consumed record.
     */
    private static final ObjectMapper MAPPER = new ObjectMapper();

    @Resource
    private KafkaTemplate<String, Object> kafkaTemplate;

    /**
     * Demo retry counter read and written from Kafka listener threads. Listener
     * containers may run concurrently, so a plain {@code int} field is not safe;
     * use an AtomicInteger.
     */
    private final AtomicInteger n = new AtomicInteger();

    /** Simple liveness endpoint. */
    @GetMapping("/test")
    public ResultVO<String> testKafka() {
        return new ResultVO<>("test");
    }

    /** Fire-and-forget producer: sends {@code input} to {@value #TOPIC}. */
    @GetMapping("/send/{input}")
    public void sendFoo(@PathVariable String input) {
        kafkaTemplate.send(TOPIC, input);
    }

    /**
     * Producer with callbacks, variant 1: lambda success/failure callbacks.
     */
    @GetMapping("/send/callback1")
    public void sendCallback1(@RequestParam String message) {
        kafkaTemplate.send(TOPIC, message).addCallback(success -> {
            RecordMetadata metadata = success.getRecordMetadata();
            LOG.info("主题：{}", metadata.topic());
            LOG.info("主题分区：{}", metadata.partition());
            LOG.info("主题offset：{}", metadata.offset());
        },
        // Pass the Throwable itself so the stack trace is logged, not just getMessage().
        failure -> LOG.error("发送消息失败", failure));
    }

    /**
     * Producer with callbacks, variant 2: explicit {@link ListenableFutureCallback}.
     * Serializes a {@link User} to JSON before sending.
     */
    @GetMapping("/send/callback2")
    public void sendCallback2(@RequestParam(required = false) String message) {
        User user = new User();
        user.setAge(11);
        user.setName(message);
        String userStr;
        try {
            userStr = MAPPER.writeValueAsString(user);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
        kafkaTemplate.send(TOPIC, userStr).addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
            @Override
            public void onFailure(Throwable ex) {
                // Keep the full stack trace in the log.
                LOG.error("消息发送失败", ex);
            }

            @Override
            public void onSuccess(SendResult<String, Object> result) {
                RecordMetadata metadata = result.getRecordMetadata();
                LOG.info("主题：{}", metadata.topic());
                LOG.info("主题分区：{}", metadata.partition());
                LOG.info("主题offset：{}", metadata.offset());
            }
        });
    }

    /**
     * Kafka transactional send demo. Requires transactions to be enabled in the
     * application configuration. The deliberate {@code 1 / 0} aborts the lambda,
     * so the first send is rolled back and the lines after it never execute.
     */
    @GetMapping("/transaction")
    public void transaction(@RequestParam String message) {
        //kafkaTemplate.inTransaction();
        String ans = kafkaTemplate.executeInTransaction(operations -> {
            operations.send(TOPIC, message, message);
            //throw new RuntimeException("发送失败");
            int divideByZero = 1 / 0; // intentional: forces the transaction to roll back
            // return "发送成功";
            operations.send(TOPIC, message + "重试", message + "重试");
            return "发送成功";
        });
        LOG.info("transaction result:{}", ans);
    }

    /**
     * Low-level access to the native {@link Producer} via
     * {@link KafkaOperations#execute}. Currently a no-op placeholder.
     */
    @GetMapping("/execute")
    public void kafkaTemplateExecute(@RequestParam String message) {
        kafkaTemplate.execute((KafkaOperations.ProducerCallback<String, Object, Object>) producer -> {
            // TODO: use the raw producer here if needed; 'message' is currently unused.
            return null;
        });
    }

    // @KafkaListener(topics = TOPIC)
    // public ResultVO<String> listen(String input) {
    //     LOG.info("listen ==> {}", input);
    //     return new ResultVO<>(input);
    // }

    // @KafkaListener(topics = TOPIC)
    // public void batchListen(List<ConsumerRecord<String, String>> records) {
    //     for (ConsumerRecord<String, String> record : records) {
    //         Optional<String> optional = Optional.ofNullable(record.value());
    //         if (!optional.isPresent()) {
    //             LOG.warn("消息不存在");
    //         }
    //         String message = optional.get();
    //         ObjectMapper mapper = new ObjectMapper();
    //         try {
    //             User user = mapper.readValue(message, User.class);
    //             LOG.info("batch listen mojo-topic:{}", user);
    //         } catch (JsonProcessingException e) {
    //             throw new RuntimeException(e);
    //         }
    //
    //     }
    //
    // }

    /**
     * topicPattern variant: consume every topic matching the pattern.
     */
    // @KafkaListener( topicPattern = "mojo.*")
    // public void listenPartition(List<ConsumerRecord<String, String>> records) {
    //     for (ConsumerRecord<String, String> record : records) {
    //         Optional<String> optional = Optional.ofNullable(record.value());
    //         if (!optional.isPresent()) {
    //             LOG.warn("消息不存在");
    //         }
    //         String message = optional.get();
    //         ObjectMapper mapper = new ObjectMapper();
    //         try {
    //             User user = mapper.readValue(message, User.class);
    //             LOG.info("listen partition mojo-topic:{}", user);
    //         } catch (JsonProcessingException e) {
    //             throw new RuntimeException(e);
    //         }
    //
    //     }
    // }

    /**
     * topicPartitions: consume only the given partitions of the topic.
     * Records with a null value are skipped (previously the code only logged a
     * warning and then called {@code Optional.get()} anyway, which would throw
     * NoSuchElementException on a null record value).
     */
    @KafkaListener(id = "myGroup", topicPartitions = @TopicPartition(topic = TOPIC, partitions = {"0", "1"}), groupId =
            "defaultConsumerGroup")
    public void assignPartition(List<ConsumerRecord<String, String>> records, Consumer<String, String> consumer) {
        for (ConsumerRecord<String, String> record : records) {
            LOG.info("consumer record key:{}", record.key());
            Optional<String> optional = Optional.ofNullable(record.value());
            if (!optional.isPresent()) {
                LOG.warn("消息不存在");
                continue; // bug fix: don't call get() on an empty Optional
            }
            String message = optional.get();
            try {
                Object user = MAPPER.readValue(message, Object.class);
                LOG.info("assign partition  mojo-topic:{}", user);
            } catch (JsonProcessingException e) {
                throw new RuntimeException(e);
            }
        }
        // Commit once per polled batch instead of once per record.
        consumer.commitAsync();
    }

    /**
     * Sends ten JSON-serialized users (age 1..10) to the "exception" topic to
     * feed the retry / dead-letter demo consumers below.
     */
    @GetMapping("/make")
    public void makeException() {
        for (int i = 1; i <= 10; i++) {
            User user = new User();
            user.setAge(i);
            user.setName("分身" + i);
            String userStr;
            try {
                userStr = MAPPER.writeValueAsString(user);
            } catch (JsonProcessingException e) {
                throw new RuntimeException(e);
            }
            kafkaTemplate.send("exception", userStr).addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
                @Override
                public void onFailure(Throwable ex) {
                    // Throwing from a ListenableFutureCallback is swallowed by the
                    // callback executor and never reaches the caller — log instead.
                    LOG.error("消息发送失败", ex);
                }

                @Override
                public void onSuccess(SendResult<String, Object> result) {
                    RecordMetadata metadata = result.getRecordMetadata();
                    LOG.info("partition分区：{},offset：{}", metadata.partition(), metadata.offset());
                }
            });
        }
    }

    //@RetryableTopic(
    //        attempts = "5",
    //        backoff = @Backoff(delay = 100, maxDelay = 1000)
    //)

    // Standalone consumer variant (no consumer group):
    //@KafkaListener(topics = {"exception"}, id = "standaloneGroup", concurrency = "3")

    // NOTE: an id without a groupId overrides the groupId from the config file.
    /**
     * Batch consumer group for the "exception" topic; throws on every age divisible
     * by 15 to exercise the retry / dead-letter flow.
     */
    @KafkaListener(topicPartitions =
    @TopicPartition(topic = "exception", partitions = {"0"}), groupId = "defaultConsumerGroup", containerFactory = "batchConsumerFactory")
    public void exception(List<ConsumerRecord<String, String>> recordList) {
        //public void exception(String record) {
        LOG.info(">>>>>>>>>>>>>>>>");

        for (ConsumerRecord<String, String> record : recordList) {
            LOG.info("group id:{}", KafkaUtils.getConsumerGroupId());
            LOG.info("消息主体：{}", record.value());
            try {
                User user = MAPPER.readValue(record.value(), User.class);
                if (user.getAge() % 15 == 0) {
                    // getAndIncrement logs the pre-increment value, matching the
                    // original "log n, then n++" sequence, but atomically.
                    LOG.info("第几次重试：{}", n.getAndIncrement());
                    throw new RuntimeException("消费异常:" + user);
                }
            } catch (Exception e) {
                // Log with the full stack trace, then rethrow (cause preserved) so
                // the container's error handler / retry machinery kicks in.
                LOG.error("消费异常", e);
                throw new RuntimeException(e);
            }
        }
        //ack.acknowledge();
    }

    //@KafkaListener(topics = "exception.DLT", groupId = "KafkaMessListener")
    //public void deleteLetterQueue(String recordList) {
    //    LOG.info("=============================================:{}", recordList);
    //for (ConsumerRecord<String, String> record : recordList) {
    //    LOG.info("死信队列消费 ===> record:{}", record.value());
    //}
    //}

    /**
     * Dead-letter-topic consumer: logs every record that ended up on exception.DLT.
     */
    @KafkaListener(topicPartitions = @TopicPartition(topic = "exception.DLT", partitions = {"0"}), groupId =
            "KafkaMessListener")
    public void deleteLetterQueue(List<ConsumerRecord<String, String>> recordList) {
        LOG.info("=============================================:{}", recordList);
        for (ConsumerRecord<String, String> record : recordList) {
            LOG.info("死信队列消费 ===> record:{}", record.value());
        }
    }
}
