package com.corn.kafkastream.demo;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Printed;

import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.stream.Collectors;

/**
 * Demonstrates the Kafka Streams {@code flatMap} operator: each input record's
 * value is split into words, and one (word, word-length) pair is emitted per word.
 *
 * @author : Jim Wu
 * @version 1.0
 * @since : 2022/9/14 15:50
 */

public class FlatMapApiTest {
    /** Kafka bootstrap server to connect to. */
    private static final String BOOTSTRAP_SERVER = "node1:9092";

    /** Topic the topology consumes from. */
    private static final String INPUT_TOPIC = "stream.input";

    /** Output topic (currently unused by this demo; kept for symmetry with other demos). */
    private static final String OUTPUT_TOPIC = "stream.output";

    /** Streams application id (also the consumer group id). */
    private static final String APP_ID = "FLAT_MAP_APP_ID";

    /**
     * Builds and runs a topology that splits every input value into
     * space-separated words and prints one (word, length) pair per word.
     *
     * @param args unused
     * @throws InterruptedException if the main thread is interrupted while waiting
     */
    public static void main(String[] args) throws InterruptedException {
        // 1. Build the Streams configuration.
        Properties properties = new Properties();
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, APP_ID);
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVER);

        // 2. Assemble the topology.
        StreamsBuilder builder = new StreamsBuilder();
        builder.stream(INPUT_TOPIC,
                        Consumed.with(Serdes.String(), Serdes.String())
                                .withOffsetResetPolicy(Topology.AutoOffsetReset.LATEST)
                                .withName("input-stream"))
                // Split each value on spaces; emit one (word, word length) record per word.
                .flatMap((k, v) -> Arrays.stream(v.split(" "))
                        .map(word -> KeyValue.pair(word, word.length()))
                        .collect(Collectors.toList()))
                .print(Printed.toSysOut());

        KafkaStreams streams = new KafkaStreams(builder.build(), properties);
        CountDownLatch latch = new CountDownLatch(1);

        // BUG FIX: the shutdown hook must be registered BEFORE blocking on the
        // latch. The original code called await() first, so the hook was never
        // installed, streams.close() was never invoked on JVM shutdown, and the
        // latch was never counted down.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            streams.close();
            latch.countDown();
        }));

        streams.start();
        // Block the main thread until the shutdown hook releases the latch.
        latch.await();
    }
}
