package com.sunday.common.mq.kafka.study.spring.e20_Brave_Trace;

import lombok.extern.slf4j.Slf4j;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

/**
 * REST endpoints for manually exercising Kafka producing/consuming together with Brave tracing.
 *
 * <p>Auto-configuration entry point: {@link org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration}
 */
@Slf4j
@RestController
@RequestMapping("/test")
public class MQApi {

    /**
     * Listener id of the batch consumer. Shared between the {@code @KafkaListener} declaration
     * and the /start and /stop endpoints so the three cannot drift apart.
     */
    private static final String BATCH_LISTENER_ID = "myId_16";

    @Autowired
    private KafkaTemplate<Object, Object> kafkaTemplate;

    @Autowired
    private KafkaListenerEndpointRegistry endpointRegistry;

    /**
     * Sends a single record to {@code topic}, using {@code key} as both the record key and value,
     * then blocks until the broker acknowledges the send.
     *
     * @param topic destination topic
     * @param key   record key (also used as the record value)
     * @throws ExecutionException   if the send fails
     * @throws InterruptedException if interrupted while waiting for the acknowledgement
     */
    @GetMapping("/send/{topic}/{key}")
    public void send(@PathVariable String topic, @PathVariable String key) throws InterruptedException, ExecutionException {
        CompletableFuture<SendResult<Object, Object>> future = kafkaTemplate.send(topic, key, key);
        log.info("============结束访问================");
        // Non-blocking alternatives: future.whenComplete(...) / future.whenCompleteAsync(...).
        log.info("[get] {}", future.get());
    }

    /**
     * Fire-and-forget send to {@code topic15}.
     *
     * <p>NOTE(review): the loop bound is 1, so only a single record is sent per call — presumably
     * intentional for the tracing experiment; raise the bound to exercise real batching.
     *
     * @throws ExecutionException   declared for signature compatibility (no blocking get is performed)
     * @throws InterruptedException declared for signature compatibility
     */
    @GetMapping("/batch")
    public void batch() throws InterruptedException, ExecutionException {
        for (int i = 0; i < 1; i++) {
            CompletableFuture<SendResult<Object, Object>> future = kafkaTemplate.send("topic15", i + "", i + "");
            log.info("============结束访问================");
        }
    }

    /**
     * Consumer demonstrating why Brave's Kafka instrumentation differs from its Rabbit one.
     *
     * <p>The underlying issue: in
     * {@link brave.kafka.clients.TracingConsumer#poll(org.apache.kafka.clients.consumer.ConsumerRecords)}
     * the Kafka wrapper does not bind the span the way the Rabbit instrumentation does; instead it
     * first extracts the trace context and then re-injects it back into the record headers.
     * Why the difference from
     * {@link brave.spring.rabbit.TracingRabbitListenerAdvice#invoke(MethodInvocation)}, which binds
     * the trace fully and carries it through the downstream business code?
     * <ol>
     *   <li>Rabbit and Kafka differ: Rabbit supports both push and pull modes, Kafka only pull.</li>
     *   <li>Rabbit's pull has no batch fetch, while Kafka's pull is batched — this is the essential
     *       problem. Because Kafka fetches a batch, a single trace cannot represent a whole series of
     *       {@code tracer.withSpanInScope} actions, so the wrapper puts the Brave identifiers back
     *       into the headers and leaves the decision to the business code.</li>
     *   <li>The RocketMQ Brave wrapper was hand-rolled by the author; RocketMQ is also pull-only
     *       without batching, and Spring's optimizations make the goal reachable there. RocketMQ
     *       faces the same problem as Kafka, so
     *       {@link com.sunday.common.mq.rocket.brave.ConsumeMessageBraveTracingHookImpl#consumeMessageBefore(org.apache.rocketmq.client.hook.ConsumeMessageContext)}
     *       indeed lacked this consideration — to be improved later, taking SkyWalking as a
     *       reference.</li>
     * </ol>
     *
     * <p>PS: nothing is printed for the header below because {@code TracingAfterReceivePostProcessor}
     * in {@code TracingReceiveAspect} calls {@code kafkaTracing.nextSpan(record)}, which clears the
     * trace information from the headers.
     *
     * @param record the consumed record
     * @throws InterruptedException if the simulated 5-second processing delay is interrupted
     */
    @KafkaListener(
            id = "myId_15",
            topics = "topic15",
            clientIdPrefix = "TEST15"
    )
    public void listen15(ConsumerRecord<String, String> record) throws InterruptedException {
        log.info("[listen15][收到测试消息] message = {}", record.value());
        String header = HeaderUtils.lastStringHeader(record.headers(), "traceparent");
        TimeUnit.SECONDS.sleep(5);
        log.info("header brave message : {}", header);
    }

    /**
     * Second consumer on {@code topic15} (separate group via its own listener id), receiving the
     * payload as a plain String; sleeps 5 seconds to simulate slow processing.
     *
     * @param record the message payload
     * @throws InterruptedException if the simulated delay is interrupted
     */
    @KafkaListener(
            id = "myId_15_A",
            topics = "topic15",
            clientIdPrefix = "TEST15_A"
    )
    public void listen15_A(String record) throws InterruptedException {
        log.info("[listen15_A][收到测试消息] message = {}", record);
        TimeUnit.SECONDS.sleep(5);
    }

    /**
     * Batch consumer on {@code topic16}. Declared with {@code autoStartup = "false"} and is only
     * started/stopped through the /start and /stop endpoints below.
     *
     * @param records the batch of payloads fetched in one poll
     * @throws InterruptedException if the simulated delay is interrupted
     */
    @KafkaListener(
            id = BATCH_LISTENER_ID,
            topics = "topic16",
            clientIdPrefix = "TEST16",
            batch = "true",
            containerFactory = "kafkaBatchListenerContainerFactory",
            autoStartup = "false"
    )
    public void listen16(List<String> records) throws InterruptedException {
        log.info("[Batch][收到测试消息] message = {}", records.size());
        TimeUnit.SECONDS.sleep(5);
    }

    /** Starts the batch listener container (it is registered with {@code autoStartup = "false"}). */
    @GetMapping("/start")
    public void start() {
        // getListenerContainer is @Nullable: returns null when no container exists under the id.
        var container = endpointRegistry.getListenerContainer(BATCH_LISTENER_ID);
        if (container == null) {
            throw new IllegalStateException("No Kafka listener container registered with id " + BATCH_LISTENER_ID);
        }
        container.start();
    }

    /** Stops the batch listener container started via {@link #start()}. */
    @GetMapping("/stop")
    public void stop() {
        var container = endpointRegistry.getListenerContainer(BATCH_LISTENER_ID);
        if (container == null) {
            throw new IllegalStateException("No Kafka listener container registered with id " + BATCH_LISTENER_ID);
        }
        container.stop();
    }

}
