package com.sunday.common.mq.kafka.study.spring.e19_Streams;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

/**
 * Demo REST endpoints exercising Spring Kafka: a blocking template send, a Kafka
 * Streams topology (topic12 -&gt; topic13 mapping each value to its length), and two
 * listeners that log records from both topics.
 *
 * @see org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration
 */
@Slf4j
@RestController
@RequestMapping("/test")
public class MQApi {

    @Autowired
    private KafkaTemplate<Object, Object> kafkaTemplate;

    @Autowired
    private KafkaProperties properties;

    // Single Streams client for this controller. Guarded in stream() so repeated
    // requests do not leak additional KafkaStreams instances that would all share
    // (and conflict on) the same application.id.
    private volatile KafkaStreams streams;

    /**
     * Sends {@code key} as both the record key and value to {@code topic}, blocking
     * until the broker acknowledges so the {@link SendResult} can be logged.
     *
     * @param topic destination topic name
     * @param key   path segment used as both record key and record value
     * @throws InterruptedException if interrupted while waiting for the broker ack
     * @throws ExecutionException   if the send fails
     */
    @GetMapping("/send/{topic}/{key}")
    public void send(@PathVariable String topic, @PathVariable String key) throws InterruptedException, ExecutionException {
        CompletableFuture<SendResult<Object, Object>> future = kafkaTemplate.send(topic, key, key);
        // Blocking get() is deliberate for this demo endpoint: the send outcome
        // should appear in the log before the HTTP request completes.
        log.info("{}", future.get());
    }

    /**
     * Starts a Kafka Streams topology that replaces each topic12 value with its
     * length (rendered as a String) and forwards the result to topic13.
     *
     * <p>Idempotent: a second call while a topology is already running is ignored.
     * Previously every request built and started a fresh {@link KafkaStreams}
     * instance with the same {@code application.id} and never closed it, leaking
     * clients and causing instance conflicts within the JVM.
     */
    @GetMapping("/stream")
    public synchronized void stream() {
        if (streams != null) {
            log.warn("Kafka Streams topology already started; ignoring duplicate request");
            return;
        }

        Properties props = new Properties();
        // Was buildAdminProperties(): admin properties omit the streams-specific
        // configuration (spring.kafka.streams.*), so use the streams builder.
        props.putAll(properties.buildStreamsProperties());
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "sunday-common-mq-kafka");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        StreamsBuilder builder = new StreamsBuilder();
        builder.<String, String>stream("topic12")
                .mapValues(value -> String.valueOf(value.length()))
                .to("topic13");

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), props);
        // Close the client on JVM exit so its threads and state stores shut down
        // cleanly instead of being killed mid-flight.
        Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close, "kafka-streams-shutdown"));
        kafkaStreams.start();
        streams = kafkaStreams;
    }

    /**
     * Logs every record arriving on topic12 (the raw input values).
     *
     * @param record consumed record; only the value is logged
     */
    @KafkaListener(
            id = "myId_12",
            topics = "topic12",
            clientIdPrefix = "TEST12"
    )
    public void listen12(ConsumerRecord<String, String> record) {
        log.info("[收到测试消息] message = {}", record.value());
    }

    /**
     * Logs every record arriving on topic13 (the stringified lengths produced by
     * the topology started in {@link #stream()}).
     *
     * @param record consumed record; only the value is logged
     */
    @KafkaListener(
            id = "myId_13",
            topics = "topic13",
            clientIdPrefix = "TEST13"
    )
    public void listen13(ConsumerRecord<String, String> record) {
        log.info("[收到测试消息] length = {}", record.value());
    }
}
