package com.heima.kafka.simple;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.kstream.*;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Kafka Streams demo: a windowed word count. Reads raw text messages from one
 * topic, counts words per 10-second window, and publishes the counts to a
 * downstream topic.
 *
 * @author DiXiaoNie
 * @date 2023/01/07
 * @since 2023/1/7 10:17
 */

public class StreamStart {

    public static void main(String[] args) {

        // Kafka Streams configuration.
        Properties properties = new Properties();
        // Broker address.
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.56.130:9092");
        // Default key/value serdes for every topic this app reads and writes.
        properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        // Required: identifies the application (consumer group, state dir, internal topic prefix).
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "id1");

        // Build the processing topology.
        StreamsBuilder streamsBuilder = new StreamsBuilder();
        streamProcessor(streamsBuilder);
        Topology topology = streamsBuilder.build();

        KafkaStreams kafkaStreams = new KafkaStreams(topology, properties);

        // Close the client cleanly on JVM shutdown so state stores are flushed
        // and offsets committed; without this the app leaks resources when killed.
        Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close));

        // Start the stream processing threads.
        kafkaStreams.start();
    }

    /**
     * Wires the word-count topology into the given builder.
     *
     * <p>Input records look like {@code key=first0, value="hello kafka"}; the
     * output is one record per (word, window) with the word as key and its
     * count (as a decimal string) as value.
     *
     * @param streamsBuilder builder the topology is registered on
     */
    private static void streamProcessor(StreamsBuilder streamsBuilder) {
        // Source: raw text messages, e.g. value = "hello kafka".
        KStream<String, String> stream = streamsBuilder.stream("kafka-stream-producer");
        stream
                // Split each value into whitespace-separated words:
                // "hello kafka" -> ["hello", "kafka"].
                .flatMapValues(value -> Arrays.asList(value.split(" ")))
                // Re-key each record by the word itself so identical words group together.
                .groupBy((key, value) -> value)
                // Count per word within tumbling 10-second windows.
                .windowedBy(TimeWindows.of(Duration.ofSeconds(10)))
                .count()
                // KTable of counts -> KStream of (windowed word, count) update records.
                .toStream()
                // Flatten Windowed<String> to the plain word; render the Long count
                // as a String so the default String serde can serialize the output.
                // (Windowed#key() already returns String — no toString() needed.)
                .map((KeyValueMapper<Windowed<String>, Long, KeyValue<String, String>>) (key, value) ->
                        new KeyValue<>(key.key(), value.toString()))
                // Sink: topic the downstream consumer listens on.
                .to("kafka-stream-consumer");
    }
}
