package top.chenjipdc;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KTable;

import java.time.Duration;
import java.util.Properties;

public class SampleStreams {

    public static void main(String[] args) {

        StreamsWrapper streamsWrapper = new StreamsWrapper("mongo_logs");

        streamsWrapper.start();

    }
}


/**
 * Background thread that builds and runs a small Kafka Streams topology:
 * it reads {@code topic} as a {@link KTable}, prints every update, converts
 * the table to a stream, and prints each streamed record again.
 *
 * <p>A JVM shutdown hook (registered in {@link #start()}) closes the
 * underlying {@link KafkaStreams} instance so the application exits cleanly.
 */
class StreamsWrapper extends Thread {

    /** Default broker list, used by the original single-arg constructor. */
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "192.168.1.251:9092";

    /** Default application id, used by the original single-arg constructor. */
    private static final String DEFAULT_APPLICATION_ID = "TestStreams";

    /** Bound on how long the shutdown hook waits for the streams to close. */
    private static final Duration CLOSE_TIMEOUT = Duration.ofSeconds(10);

    /** Assigned on the streams thread in {@link #run()}; volatile so the
     *  shutdown-hook thread sees the assignment. */
    private volatile KafkaStreams streams;

    private final String topic;
    private final String bootstrapServers;
    private final String applicationId;

    /**
     * Creates a wrapper with the sample's default broker and application id.
     *
     * @param topic the Kafka topic to read as a table
     */
    StreamsWrapper(String topic) {
        this(topic, DEFAULT_BOOTSTRAP_SERVERS, DEFAULT_APPLICATION_ID);
    }

    /**
     * Creates a wrapper with an explicit broker list and application id.
     *
     * @param topic            the Kafka topic to read as a table
     * @param bootstrapServers comma-separated {@code host:port} broker list
     * @param applicationId    Kafka Streams application id (consumer group)
     */
    StreamsWrapper(String topic, String bootstrapServers, String applicationId) {
        this.topic = topic;
        this.bootstrapServers = bootstrapServers;
        this.applicationId = applicationId;
    }

    @Override
    public synchronized void start() {
        super.start();

        // Close the streams on JVM exit. The null check covers the window
        // before run() has created the instance; the bounded timeout keeps a
        // hung stream from blocking JVM shutdown forever.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            KafkaStreams s = streams;
            if (s != null) {
                s.close(CLOSE_TIMEOUT);
            }
        }));
    }

    @Override
    public void run() {
        Properties properties = new Properties();
        properties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
        properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
        properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());

        StreamsBuilder builder = new StreamsBuilder();
        // table => streams
        KTable<String, String> kTable = builder.table(topic);
        // NOTE: filter always returns true — it is used only for its printing
        // side effect (KTable has no peek operator in this API version).
        kTable.filter((key, value) -> {
            System.out.println("table => key: " + key + " value: " + value);
            return true;
        }).toStream().peek((key, value) ->
                System.out.println("streams => key: " + key + " value: " + value)
        );

        streams = new KafkaStreams(builder.build(), properties);
        streams.start();
    }
}
