package com.corn.kafkastream.demo;

import cn.hutool.core.util.StrUtil;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.*;
import org.apache.kafka.streams.kstream.*;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.apache.kafka.streams.state.internals.KeyValueStoreBuilder;

import java.io.StringReader;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.stream.Collectors;

/**
 * Kafka Streams word-count demo backed by a named persistent key-value state
 * store: splits each input line into words, repartitions by word, and keeps a
 * running per-word count in {@code MY_WORD_COUNT_STATE_STORE}.
 *
 * @author : Jim Wu
 * @version 1.0
 * @since : 2022/9/22 15:04
 */

public class WordCountDemo {

    private static final String BOOTSTRAP_SERVER = "node1:9092";

    private static final String INPUT_TOPIC = "stream.input";

    private static final String OUTPUT_TOPIC = "stream.output";

    private static final String APP_ID = "STATE_STORE_DEMO";

    private static final String STATE_STORE = "MY_WORD_COUNT_STATE_STORE";

    private static final String STATE_STORE_DIR = "C:\\java_project\\intergration-learning-notes\\kafka-streams-tutorial\\state_store";

    public static void main(String[] args) throws InterruptedException {
        // 注意各个环境的Serdes!!!!!!!!!!!!!

        // 1. 创建配置
        Properties properties = new Properties();
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, APP_ID);
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVER);
        // 定义stateStore dir 本地磁盘地址
        properties.put(StreamsConfig.STATE_DIR_CONFIG, STATE_STORE_DIR);
        properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        // 1.1 store
        StoreBuilder storeBuilder = Stores.keyValueStoreBuilder(
                Stores.persistentKeyValueStore(STATE_STORE),
                Serdes.String(),
                // 单词数
                Serdes.Integer()
        );
        // 2. stream builder
        StreamsBuilder builder = new StreamsBuilder();
        // add state store
        builder.addStateStore(storeBuilder);
        builder.stream(INPUT_TOPIC, Consumed.with(Serdes.String(), Serdes.String()).withName("INPUT-DATA").withOffsetResetPolicy(Topology.AutoOffsetReset.EARLIEST))
                .filter((k, v) -> StrUtil.isNotBlank(v))
                .flatMap((k, v) -> {
                    String[] words = v.split(" ");
                    return Arrays.stream(words).map(e -> KeyValue.pair(e, 1)).collect(Collectors.toList());
                })
//                .selectKey((k, v) -> KeyValue.pair(v, 1))
                .peek((k, v) -> {
                    System.out.println("v = " + v + " k = " + k);
                })
                .repartition(Repartitioned.with(Serdes.String(),Serdes.Integer()).withName("WORD-COUNT-REPARTITION"))
                .transform(() -> {
                    Transformer transformer = new Transformer<String, Integer, KeyValue<String, Integer>>() {
                        private KeyValueStore<String, Integer> stateStore;

                        @Override
                        public void init(ProcessorContext context) {
                            this.stateStore = (KeyValueStore<String, Integer>) context.getStateStore(STATE_STORE);
                        }

                        @Override
                        public KeyValue<String, Integer> transform(String key, Integer value) {
                            Integer count = stateStore.get(key);
                            if (null == count || count == 0) {
                                count = 1;
                            } else {
                                count += 1;
                            }
                            stateStore.put(key, count);
                            return KeyValue.pair(key, count);
                        }

                        @Override
                        public void close() {

                        }
                    };
                    return transformer;
                }, Named.as("TRANS-WORD-COUNT"), STATE_STORE)
                .peek((k, v) -> {
                    System.out.println("k = " + k + " v = " + v);
                })
                .print(Printed.toSysOut());
//                .to(OUTPUT_TOPIC, Produced.with(Serdes.String(), Serdes.Integer()).withName("WORD_COUNT_SINK"));


        KafkaStreams streams = new KafkaStreams(builder.build(), properties);

        CountDownLatch countDownLatch = new CountDownLatch(1);
        streams.start();
//        countDownLatch.await();
//
//        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
//            streams.close();
//            countDownLatch.countDown();
//        }));

    }
}
