package com.heima.boot.config;

import lombok.Getter;
import lombok.Setter;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.*;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;

import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

/**
 * Registers a custom {@link KafkaStreamsConfiguration} bean (replacing the
 * spring-kafka default) so the application can supply its own Kafka Streams
 * settings, and declares a word-count stream-processing topology.
 */
@Setter
@Getter
@Configuration
@EnableKafkaStreams
@ConfigurationProperties(prefix = "kafka")
public class KafkaStreamConfig {
    // NOTE(review): unused in this class — presumably intended for a
    // max.request.size / message-size setting; confirm before wiring in or removing.
    private static final int MAX_MESSAGE_SIZE = 16 * 1024 * 1024;

    // Kafka bootstrap servers, bound from the `kafka.hosts` property.
    private String hosts;
    // Group name used to derive the application id / client id, bound from `kafka.group`.
    private String group;

    /**
     * Overrides the default Kafka Streams configuration with
     * application-specific settings (servers, application id, String serdes).
     *
     * @return the streams configuration consumed by {@code @EnableKafkaStreams}
     */
    @Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
    public KafkaStreamsConfiguration defaultKafkaStreamsConfig() {
        Map<String, Object> props = new HashMap<>();
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, hosts);
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, group + "_stream_aid");
        props.put(StreamsConfig.CLIENT_ID_CONFIG, group + "_stream_cid");
        // NOTE(review): RETRIES_CONFIG is deprecated (and ignored) since Kafka 2.7;
        // kept for compatibility with older kafka-streams clients — confirm version.
        props.put(StreamsConfig.RETRIES_CONFIG, 10);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        return new KafkaStreamsConfiguration(props);
    }

    /**
     * Word-count topology: reads messages from {@code topic_kafka_start}, splits
     * each value on spaces, counts occurrences of each word within 10-second
     * time windows, and publishes {@code word -> count} pairs (both as Strings)
     * to {@code topic_kafka_end}.
     *
     * <p>Example: value {@code "hello kafka hello"} yields {@code hello=2, kafka=1}
     * for the window it falls into.
     *
     * @param streamsBuilder topology builder injected by spring-kafka
     * @return the source stream (returned so Spring registers the topology)
     */
    @Bean
    public KStream<String, String> kStream(StreamsBuilder streamsBuilder) {
        // 1. Consume raw messages, e.g. value = "hello kafka"
        KStream<String, String> stream = streamsBuilder.stream("topic_kafka_start");
        // 2. Split each value into words, group by word, count per 10-second window
        stream.flatMapValues(value -> Arrays.asList(value.split(" ")))
                // Re-key each record by the word itself so identical words group together
                .groupBy((key, word) -> word)
                .windowedBy(TimeWindows.of(Duration.ofSeconds(10)))
                // Aggregate to a KTable of word -> occurrence count
                .count()
                .toStream()
                // Windowed<String> key carries the word; key.key() is already a String
                .map((windowedKey, count) -> new KeyValue<>(windowedKey.key(), count.toString()))
                // 3. Publish the word counts downstream
                .to("topic_kafka_end");
        return stream;
    }
}