package com.itheima.kafka.simple;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.*;

import java.util.Arrays;

import java.util.Properties;

/**
 * 统计消息中hello出现的次数
 */
/**
 * Windowed word-count stream application.
 *
 * <p>Reads text messages from {@code INPUT_TOPIC}, splits each message on
 * single spaces, and counts how many times each word occurs within a
 * one-minute tumbling window. Each count is emitted to {@code OUT_TOPIC}
 * as a {@code word -> count} record (both serialized as Strings, per the
 * default serdes configured below).
 */
public class KafkaStreamSample {

    /** Topic the topology consumes raw text messages from. */
    private static final String INPUT_TOPIC = "itcast-heima";

    /** Topic the per-window word counts are written to. */
    private static final String OUT_TOPIC = "article_behavior_out";

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "139.9.37.235:9092");
        // application.id doubles as the consumer group id and state-store prefix.
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "article_behavior_count");
        // Keys and values on all topics default to String serialization.
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        StreamsBuilder builder = new StreamsBuilder();
        // Register the word-count topology on the builder.
        group(builder);

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        // Close the streams client on JVM shutdown so it leaves the consumer
        // group and flushes local state cleanly (the original never closed it).
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close, "streams-shutdown-hook"));
        // Start real-time processing.
        streams.start();
    }

    /**
     * Builds the word-count topology: tokenize each message, re-key by word,
     * count per one-minute tumbling window, and write the counts out.
     *
     * @param builder the {@link StreamsBuilder} the topology is registered on
     */
    private static void group(StreamsBuilder builder) {
        KStream<String, String> stream = builder.stream(INPUT_TOPIC);

        stream
                // One record per word: split the message value on single spaces.
                .flatMapValues(value -> Arrays.asList(value.split(" ")))
                // Re-key each record by the word itself so grouping counts per word.
                .map((key, word) -> new KeyValue<>(word, word))
                .groupByKey()
                // Tumbling one-minute windows (size given in milliseconds).
                .windowedBy(TimeWindows.of(1000 * 60))
                .count()
                .toStream()
                // Unwrap the Windowed<String> key and render the Long count as a
                // String so the default String serdes can write to OUT_TOPIC.
                .map((windowedKey, count) -> new KeyValue<>(windowedKey.key(), String.valueOf(count)))
                .to(OUT_TOPIC);
    }
}
