package com.corn.kafkastream.demo;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Named;
import org.apache.kafka.streams.kstream.Produced;

import java.util.Locale;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;

/**
 * Minimal Kafka Streams demo: consumes string records from {@code stream.input},
 * drops null/short values, upper-cases the rest, and produces them to {@code stream.output}.
 *
 * @author : Jim Wu
 * @version 1.0
 * @since : 2022/9/13 15:13
 */
@Slf4j
public class FirstKafkaSteamApp {

    private static final String BOOTSTRAP_SERVER = "node1:9092";

    private static final String INPUT_TOPIC = "stream.input";

    private static final String OUTPUT_TOPIC = "stream.output";

    private static final String APP_ID = "FIRST_KAFKA_STREAM_APP_ID";

    /** A record value must be strictly longer than this to pass the filter. */
    private static final int MIN_VALUE_LENGTH = 5;

    /**
     * Builds and runs the stream topology, blocking until the JVM receives a
     * shutdown signal (the shutdown hook closes the Streams client and releases
     * the latch so {@code main} can return).
     *
     * @param args unused
     * @throws InterruptedException if the main thread is interrupted while waiting
     */
    public static void main(String[] args) throws InterruptedException {
        // 1. Streams configuration: application id + broker address. Serdes are
        // supplied explicitly per operator below, so no default serde config is needed.
        Properties properties = new Properties();
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, APP_ID);
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVER);

        // 2. Topology: source -> peek -> filter -> mapValues -> peek -> sink.
        StreamsBuilder builder = new StreamsBuilder();
        builder.stream(INPUT_TOPIC, Consumed.with(Serdes.String(), Serdes.String()).withName("source-processor"))
                // Log each incoming value (debugging aid; peek has no effect on the stream).
                .peek((k, v) -> log.info("value = {}", v))
                // Drop null values and values whose length is <= MIN_VALUE_LENGTH.
                .filter((k, v) -> null != v && v.length() > MIN_VALUE_LENGTH, Named.as("filter-word-processor"))
                // Locale.ROOT makes the casing locale-independent (avoids e.g. the Turkish-i problem
                // where the result would depend on the JVM's default locale).
                .mapValues(v -> v.toUpperCase(Locale.ROOT), Named.as("uppercase-processor"))
                .peek((k, v) -> log.info("after upperProcessor value= {}", v))
                .to(OUTPUT_TOPIC, Produced.with(Serdes.String(), Serdes.String()).withName("sink-processor"));

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), properties);

        // Keep main alive until shutdown; the hook closes the client cleanly first.
        CountDownLatch countDownLatch = new CountDownLatch(1);
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            kafkaStreams.close();
            countDownLatch.countDown();
        }));

        kafkaStreams.start();

        countDownLatch.await();
    }
}
