package com.itheima.kafka.simple;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.kstream.*;
import org.springframework.util.StringUtils;

import java.util.Arrays;
import java.util.List;
import java.util.Properties;

public class KafkaStreamSample {

    public static final String INPUT_TOPIC="input_topic";
    public static final String OUT_TOPIC="out_topic";

    public static void main(String[] args) {
        // Kafka Streams client configuration
        Properties prop = new Properties();
        prop.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.200.130:9092");
        prop.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafka-stream-sample");
        prop.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        prop.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        StreamsBuilder builder = new StreamsBuilder();
        // Register the windowed "hello"-count topology on the builder.
        group(builder);

        KafkaStreams streams = new KafkaStreams(builder.build(), prop);
        // Close the client on JVM shutdown so state stores are flushed and the
        // consumer group rebalances promptly instead of waiting for a session timeout.
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
    }

    /**
     * Defines the stream-processing topology: consume from {@link #INPUT_TOPIC},
     * count occurrences of the word "hello" per key inside 10-second tumbling
     * windows, and publish the running count to {@link #OUT_TOPIC} as a value of
     * the form {@code "hello:<count>"}.
     *
     * @param builder the topology builder the processing steps are registered on
     */
    private static void group(StreamsBuilder builder) {
        KStream<String, String> stream = builder.stream(INPUT_TOPIC, Consumed.with(Topology.AutoOffsetReset.LATEST));
        KStream<String, String> map = stream.groupByKey()
                .windowedBy(TimeWindows.of(10000))
                .aggregate(
                        // Initializer: the window's aggregate starts at zero occurrences.
                        () -> "hello:0",
                        /*
                         * Aggregator: add the "hello" occurrences found in the new
                         * record's comma-separated value to the running total carried
                         * in the previous aggregate. (Previously the prior aggregate
                         * was discarded, so multi-record windows reported only the
                         * last record's count.)
                         */
                        (key, value, aggValue) -> {
                            int total = parseCount(aggValue);
                            if (value != null && !value.isEmpty()) {
                                for (String token : value.toLowerCase().split(",")) {
                                    if (token.equals("hello")) {
                                        total++;
                                    }
                                }
                            }
                            return String.format("hello:%d", total);
                        },
                        Materialized.as("kafka-stream-sample-count"))
                .toStream()
                // Flatten the Windowed<String> key back to a plain String key for the output topic.
                .map((key, value) -> new KeyValue<>(key.key().toString(), value));
        map.to(OUT_TOPIC, Produced.with(Serdes.String(), Serdes.String()));
    }

    /**
     * Extracts the numeric count from an aggregate of the form {@code "hello:<n>"}.
     *
     * @param aggValue the previous aggregate value; may be null or malformed
     * @return the parsed count, or 0 when no parseable count is present
     */
    private static int parseCount(String aggValue) {
        int sep = aggValue == null ? -1 : aggValue.lastIndexOf(':');
        if (sep < 0) {
            return 0;
        }
        try {
            return Integer.parseInt(aggValue.substring(sep + 1));
        } catch (NumberFormatException e) {
            return 0;
        }
    }
}
