package com.itheima.kafka.simple;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.*;
import org.apache.kafka.streams.kstream.internals.MaterializedInternal;
import org.apache.kafka.streams.state.WindowStore;

import java.util.Arrays;
import java.util.Properties;

/**
 * Requirement:
 *  Receive Kafka messages and count the occurrences of each word in them.
 *  Example input messages:
 *      hello kafka stareams
 *      hello heima kafka
 *      hello beijing heima kafka
 *  Expected result:
 *      hello : 3
 *      kafka : 3
 *      stareams : 1
 *      heima : 2
 *      beijing : 1
 */
public class KafkaStreamFastStart {

    public static void main(String[] args) {

        // Kafka Streams configuration
        Properties prop = new Properties();
        prop.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.14.37:9092");
        prop.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-faststart");

        // Topology builder
        StreamsBuilder builder = new StreamsBuilder();

        // Define the word-count topology
        group(builder);

        // Create the KafkaStreams instance from the built topology
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), prop);

        // FIX: the streams instance was never closed. Register a shutdown hook so
        // state stores are flushed and consumer-group membership is released
        // cleanly when the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close, "streams-shutdown-hook"));

        // Start stream processing
        kafkaStreams.start();
    }

    /**
     * Builds the word-count topology: reads text messages from {@code input_topic},
     * splits each message into words, counts occurrences of each word within a
     * 5-second time window, and writes {@code word -> count} pairs (both rendered
     * as strings) to {@code out_topic}.
     *
     * @param builder the {@link StreamsBuilder} the topology is registered on
     */
    private static void group(StreamsBuilder builder) {
        // Consume messages
        KStream<String, String> stream = builder.stream("input_topic");

        stream
                // Split each message value into individual words
                .flatMapValues((String value) -> {
                    System.out.println("消息的value:" + value); // e.g. "hello kafka stareams"
                    return Arrays.asList(value.split(" "));
                })
                // Re-key each record by the word itself so grouping counts per word
                .map((key, value) -> new KeyValue<>(value, value))
                .groupByKey()
                // 5-second time aggregation window
                .windowedBy(TimeWindows.of(5000))
                // The value becomes the per-word count (Long) for the window
                .count(Materialized.as("count-word-num-0001"))
                // Convert the KTable back to a KStream
                .toStream()
                // Unwrap the windowed key and render key and value as plain Strings
                .map((key, value) -> new KeyValue<>(key.key().toString(), value.toString()))
                // Publish the results
                .to("out_topic");
    }
}
