package com.usian.kafka.stream;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.*;

import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;

/**
 * Kafka Streams word-count topology: reads whitespace-separated text from
 * {@code word-count-stream-topic}, counts occurrences of each word in the
 * state store {@code testwordcount}, and emits each word's running count
 * (as a String) to {@code word-count-stream-consumer}.
 */
public class WordCountApplication {
    private static final String INPUT_TOPIC = "word-count-stream-topic";
    private static final String OUT_TOPIC = "word-count-stream-consumer";
    private static final String KAFKA_IP = "192.168.6.139:9092";

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-application");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_IP);
        // Default serdes: both keys and values are plain strings on the wire.
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        // Topology builder.
        StreamsBuilder builder = new StreamsBuilder();
        // Consume raw lines from the input topic, e.g. "hello kafka usian".
        KStream<String, String> stream = builder.stream(INPUT_TOPIC);
        // Split each line into words: "hello kafka usian" -> ["hello", "kafka", "usian"].
        stream.flatMapValues(WordCountApplication::tokenize)
                // Re-key each record by the word itself so identical words are
                // routed to the same partition and grouped together.
                .map((key, word) -> new KeyValue<>(word, word))
                .groupByKey()
                // Count occurrences per word; state lives in the "testwordcount" store.
                .count(Materialized.as("testwordcount"))
                // Convert the Long count to a String so the output topic can use
                // the default String value serde configured above.
                .mapValues((readOnlyKey, value) -> {
                    System.out.println("readOnlyKey==" + readOnlyKey);
                    System.out.println("value==" + value);
                    return value.toString();
                })
                .toStream()
                .to(OUT_TOPIC);

        KafkaStreams streams = new KafkaStreams(builder.build(), props);
        // Close the client on JVM shutdown so state stores are flushed and the
        // consumer group leaves cleanly — the original never released these resources.
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
        streams.start();
    }

    /**
     * Splits a line into words on runs of whitespace.
     *
     * <p>Unlike the original {@code split(" ")}, this does not emit empty-string
     * tokens for leading, trailing, or repeated spaces, so "" never gets counted
     * as a word.
     *
     * @param line raw message value; may be blank
     * @return the words of the line, or an empty list for a blank line
     */
    private static Iterable<String> tokenize(String line) {
        String trimmed = line.trim();
        if (trimmed.isEmpty()) {
            return Collections.emptyList();
        }
        return Arrays.asList(trimmed.split("\\s+"));
    }
}
