package com.corn.kafkastream.serdes;

import com.corn.kafkastream.entity.User;
import com.corn.kafkastream.serdes.BusinessJsonSereds;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.*;

import java.util.Properties;
import java.util.concurrent.CountDownLatch;

/**
 * @author : Jim Wu
 * @version 1.0
 * @function :
 * @since : 2022/9/20 16:38
 */
@Slf4j
public class CustomSerdesStreamApp {

    private static final String BOOTSTRAP_SERVER = "node1:9092";

    private static final String INPUT_TOPIC = "USER-INFO-INPUT";

    private static final String APP_ID = "CUSTOM_SERDES_APP_ID";

    private static final String CQ_TOPIC = "CQ_USER_TOPIC";

    private static final String SH_TOPIC = "SH_USER_TOPIC";

    /**
     * Builds and runs a Kafka Streams topology that reads {@code User} records
     * (deserialized with the custom {@code BusinessJsonSereds.User()} serde) from
     * {@link #INPUT_TOPIC}, masks the bank number, and routes each record to a
     * city-specific topic based on its address.
     *
     * @param args unused command-line arguments
     * @throws InterruptedException if the main thread is interrupted while
     *                              waiting for shutdown
     */
    public static void main(String[] args) throws InterruptedException {
        // 1. Streams configuration. Per-operation serdes are supplied explicitly
        // below (Consumed/Produced), so no default serde config is needed here.
        Properties properties = new Properties();
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, APP_ID);
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVER);

        // 2. Build the topology.
        StreamsBuilder builder = new StreamsBuilder();

        builder.stream(INPUT_TOPIC, Consumed.with(Serdes.String(), BusinessJsonSereds.User()).withName("input-stream").withOffsetResetPolicy(Topology.AutoOffsetReset.LATEST))
                // Debug visibility into each record as it enters the pipeline.
                .peek((k, v) -> log.info("v : {}", v), Named.as("peek-data"))
                // Mask the bank number before it leaves this application.
                // NOTE(review): this mutates the incoming value object in place and
                // assumes getBankNo() is non-null — confirm upstream guarantees.
                .mapValues(v -> {
                    v.setBankNo(v.getBankNo().replaceAll("1", "*"));
                    return v;
                }, Named.as("masking-value"))
                .split(Named.as("split-branch"))
                // Dispatch Chongqing users to the CQ topic.
                .branch((k, v) -> "cq".equals(v.getAddr()), Branched.withConsumer(ks -> {
                    ks.to(CQ_TOPIC, Produced.with(Serdes.String(), BusinessJsonSereds.User()));
                }))
                // Dispatch Shanghai users to the SH topic. Records matching neither
                // branch are silently dropped (no default branch is defined).
                .branch((k, v) -> "sh".equals(v.getAddr()), Branched.withConsumer(ks -> {
                    ks.to(SH_TOPIC, Produced.with(Serdes.String(), BusinessJsonSereds.User()));
                }));

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), properties);
        CountDownLatch countDownLatch = new CountDownLatch(1);

        // Register the shutdown hook BEFORE blocking on the latch. In the original
        // ordering the hook was registered after await(), which could never return
        // because the hook itself is the only caller of countDown() — so the app
        // could neither install the hook nor close the streams instance cleanly.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            kafkaStreams.close();
            countDownLatch.countDown();
        }, "streams-shutdown-hook"));

        kafkaStreams.start();
        // Block the main thread until the shutdown hook releases the latch.
        countDownLatch.await();
    }

}
