package com.heima.kafka.simple;


import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.kstream.*;


import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Minimal Kafka Streams word-count application.
 *
 * <p>Consumes whitespace-separated text messages from {@code kafkastream-topic1},
 * splits each value into words, counts occurrences of each word within tumbling
 * 10-second windows, and publishes {@code word -> count-as-string} records to
 * {@code kafkastream-topic2}.
 */
public class KafkaStreamStart {

    /** Topic the word-count topology reads raw text messages from. */
    private static final String INPUT_TOPIC = "kafkastream-topic1";

    /**
     * Topic the per-window word counts are written to.
     * NOTE(review): the original code wrote to "kafkastream-toipc2" — a typo of
     * "topic2". Fixed here; any consumer subscribed to the misspelled name must
     * be updated to match.
     */
    private static final String OUTPUT_TOPIC = "kafkastream-topic2";

    public static void main(String[] args) {
        // Stream-processing configuration: broker address, default String serdes
        // for keys and values, and the application id (also the consumer group id).
        Properties props = new Properties();
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "114.116.122.120:9092");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "applicationId");

        // Build the processing topology.
        StreamsBuilder streamsBuilder = new StreamsBuilder();
        buildWordCountTopology(streamsBuilder);
        Topology topology = streamsBuilder.build();

        // KafkaStreams(topology, props): topology describes the processing graph,
        // props carries the client configuration.
        KafkaStreams kafkaStreams = new KafkaStreams(topology, props);

        // Fix: the original never closed the client. Close it on JVM shutdown so
        // state stores are flushed and the consumer group rebalances cleanly.
        Runtime.getRuntime()
                .addShutdownHook(new Thread(kafkaStreams::close, "streams-shutdown-hook"));

        // Start processing (non-blocking; stream threads run in the background).
        kafkaStreams.start();
    }

    /**
     * Wires the word-count topology onto the given builder.
     *
     * <p>Example: a producer sends {@code key=first0 value="hello kafka"} and
     * {@code key=first1 value="hello itcast"}; within one 10-second window this
     * topology emits {@code ("hello","2"), ("kafka","1"), ("itcast","1")}.
     *
     * @param streamsBuilder builder the source, aggregation and sink nodes are added to
     */
    private static void buildWordCountTopology(StreamsBuilder streamsBuilder) {
        // 1. Consume the raw text messages produced upstream.
        KStream<String, String> source = streamsBuilder.stream(INPUT_TOPIC);

        source
                // 2. Split each value on single spaces into individual words,
                //    e.g. "hello kafka" -> ["hello", "kafka"].
                .flatMapValues(value -> Arrays.asList(value.split(" ")))
                // 3. Re-key each record by the word itself so identical words
                //    land in the same group (one group per distinct word).
                .groupBy((key, word) -> word)
                // 4. Aggregate within tumbling 10-second windows.
                .windowedBy(TimeWindows.of(Duration.ofSeconds(10)))
                // 5. Count occurrences per word per window; yields a KTable.
                .count()
                // 6. Convert the changelog KTable back to a KStream of updates.
                .toStream()
                // 7. Unwrap the windowed key and stringify the count so the
                //    record matches the default String/String serdes of the sink.
                //    (key.key() is already a String; no toString() needed.)
                .map((windowedKey, count) -> new KeyValue<>(windowedKey.key(), count.toString()))
                // 8. Publish the per-window word counts.
                .to(OUTPUT_TOPIC);
    }
}
