package com.itheima.kafka.stream;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.apache.kafka.streams.kstream.Windowed;

import java.util.Arrays;
import java.util.Properties;

/**
 * @Description: Processes a real-time data stream.
 * Requirement: count the number of occurrences of each word.
 * @Version: V1.0
 *
 * Example: messages received from the source topic:
 *
 * * hello kafka streams
 * * hello heima kafka
 * * hello shanghai heima kafka
 *
 * KafkaStreamFastStart performs the real-time stream processing,
 *
 * and writes the results to the output topic:
 *
 * *     hello: 3
 * *     kafka: 3
 * *     streams: 1
 * *     heima: 2
 * *     shanghai: 1
 */
public class KafkaStreamFastStart {

    /**
     * Entry point: configures, builds, and starts the word-count streams application.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // 1. Kafka Streams configuration: broker address, default String serdes,
        //    and the application id (also used as the consumer group id).
        Properties prop = new Properties();
        prop.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.200.129:9092");
        prop.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-sample");

        // 2. Build the processing topology.
        StreamsBuilder builder = new StreamsBuilder();
        streamProcessor(builder);

        // 3. Create the KafkaStreams client from the topology and configuration.
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), prop);

        // Close the client cleanly on JVM shutdown so state stores are flushed,
        // offsets are committed, and the consumer group is left gracefully.
        Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close));

        // 4. Start processing the real-time stream.
        kafkaStreams.start();
    }

    /**
     * Builds the word-count topology:
     * 1. read sentences from the input topic;
     * 2. split each sentence into words and count them per 5-second window;
     * 3. publish the per-window counts to the output topic.
     *
     * @param builder the streams builder the topology is registered on
     */
    private static void streamProcessor(StreamsBuilder builder) {

        // Unbounded input stream of <key, sentence> records.
        KStream<String, String> kStream = builder.stream("itcast-topic-input");

        kStream
                // Split each sentence on single spaces, emitting one record per word.
                .flatMapValues(value -> {
                    System.out.println("接收待处理数据： " + value);
                    return Arrays.asList(value.split(" "));
                })
                // Re-key each record by the word itself so counts accumulate per word.
                .groupBy((key, value) -> {
                    System.out.println("k: " + key + "----- v:" + value);
                    return value;
                })
                // 5-second time window: turns the logically unbounded stream into
                // bounded per-window aggregations.
                .windowedBy(TimeWindows.of(5000))
                .count()
                .toStream()
                // Flatten the Windowed<String> key and Long count back into plain
                // String key/value pairs for the output topic's default serdes.
                .map((windowedKey, count) -> {
                    System.out.println("处理之后数据： k=" + windowedKey.key().toString() + ":" + count);
                    return new KeyValue<>(windowedKey.key().toString(), count.toString());
                })
                // Publish the results to the output topic.
                .to("itcast-topic-output");

    }

}
