package com.heima.kafka.simple;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.internals.Sender;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.kstream.*;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Kafka Streams word-count demo.
 *
 * <p>Reads whitespace-separated sentences from topic {@code kafka-stream-producer},
 * counts occurrences of each word within a 10-second time window, and publishes
 * {@code word -> count} records to topic {@code kafka-stream-consumer}.
 */
public class StreamStart {
    public static void main(String[] args) {
        // 1. Configure the Kafka Streams client.
        Properties properties = new Properties();
        // Kafka broker address.
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.200.105:9092");
        // Default serdes for record keys and values (both plain strings here).
        properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        // The application id is mandatory (startup fails without it); it also
        // serves as the consumer-group id and the prefix for internal topics.
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, "id1");

        // 2. Build the processing topology.
        StreamsBuilder streamsBuilder = new StreamsBuilder();
        streamProcessor(streamsBuilder);
        Topology topology = streamsBuilder.build();

        // 3. Create and start the streams runtime.
        KafkaStreams kafkaStreams = new KafkaStreams(topology, properties);
        // Close the client on JVM shutdown so state stores are flushed and
        // offsets committed; the original code started it and never closed it.
        Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close));
        kafkaStreams.start();
    }

    /**
     * Registers the word-count topology on the given builder.
     *
     * <p>Example: input values {@code "hello kafka"}, {@code "hello itcast"}
     * produce output records {@code hello -> 2}, {@code kafka -> 1},
     * {@code itcast -> 1} per window.
     *
     * @param streamsBuilder builder the topology is registered on
     */
    private static void streamProcessor(StreamsBuilder streamsBuilder) {
        // 1. Consume raw sentences, e.g. "hello kafka", "hello itcast".
        KStream<String, String> stream = streamsBuilder.stream("kafka-stream-producer");
        stream
                // 2. Split each value on single spaces into individual words:
                //    "hello kafka" -> ["hello", "kafka"].
                .flatMapValues((ValueMapper<String, Iterable<String>>) value ->
                        Arrays.asList(value.split(" ")))
                // 3. Re-key each record by the word itself so counting groups per word
                //    (the original producer key is discarded).
                .groupBy((key, value) -> value)
                // 4. 10-second time window; late-arriving records within the window
                //    are still counted.
                .windowedBy(TimeWindows.of(Duration.ofSeconds(10)))
                // 5. Count occurrences per word per window (yields a windowed KTable).
                .count()
                // 6. Convert the KTable back to a KStream of change records.
                .toStream()
                // 7. Unwrap the windowed key and render the count as a String value,
                //    matching the default String serdes configured above.
                .map((KeyValueMapper<Windowed<String>, Long, KeyValue<String, String>>) (key, value) ->
                        new KeyValue<>(key.key(), value.toString()))
                // 8. Publish the results for downstream consumers.
                .to("kafka-stream-consumer");
    }
}
