package uiao.com.stream;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.*;

import java.time.Duration;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.function.Function;

/**
 * Kafka Streams DSL demo. Most operator examples are kept commented out so they
 * can be toggled on individually for experimentation; the active topology below
 * performs a KTable-KTable foreign-key join and writes the result to an output
 * topic. Requires a broker reachable at localhost:9092 and the referenced topics
 * to exist.
 */
public class DSLDemo {
    public static void main(String[] args) {

        // Cluster connectivity, application id, default serdes, and so on.
        // putIfAbsent is used consistently so pre-populated settings (if any)
        // are not clobbered.
        Properties props = new Properties();
        props.putIfAbsent(StreamsConfig.APPLICATION_ID_CONFIG, "streams-test-foreign");
        props.putIfAbsent(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.putIfAbsent(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.putIfAbsent(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

        final StreamsBuilder builder = new StreamsBuilder();

        // TODO Topic -> KStream
        // KStream<String, String> source = builder.stream("words01");

        // TODO Topic -> KTable
        // KStream<String, String> source1 = builder.stream("words02");


        // TODO 1. Stateless Transformations


//        KStream<String, String> left = builder.stream("join01");
//        KStream<String, String> right = builder.stream("join02");

        // TODO Branch
//        source.flatMapValues(new ValueMapper<String, Iterable<String>>() {
//            @Override
//            public Iterable<String> apply(String value) {
//                return Arrays.asList(value.split("\\W+"));
//            }
//        }).to("streams-linesplit-output");

//        Map<String,KStream<String,String>> map = source.split(Named.as("mybranch"))
//                .branch((key,value) -> key.startsWith("a"), Branched.as("a"))
//                .branch((key,value) -> key.startsWith("b"), Branched.as("b"))
//                .defaultBranch(Branched.as("c"));
//
//        map.entrySet().forEach(e -> e.getValue().to("x"));

          // TODO Filter
//        KStream<String,String> aa = source.filter((key,value) -> value.startsWith("bb"));
//        aa.to("y");

          // TODO FlatMap
//        KStream<String, Integer> trff = source.flatMap(
//                // Here, we generate two output records for each input record.
//                // We also change the key and value types.
//                // Example: (345L, "Hello") -> ("HELLO", 1000), ("hello", 9000)
//                (key, value) -> {
//                    List<KeyValue<String, Integer>> result = new LinkedList<>();
//                    result.add(KeyValue.pair(key.toUpperCase(), 1000));
//                    result.add(KeyValue.pair(key.toLowerCase(), 9000));
//                    return result;
//                }
//        );

          // TODO Foreach
//        source.foreach((key, value) -> System.out.println(key + " => " + value));


        // Group by the existing key, using the application's configured
        // default serdes for keys and values.
//        KGroupedStream<String, String> groupedStream = source.groupByKey();

          // TODO GroupByKey
// When the key and/or value types do not match the configured
// default serdes, we must explicitly specify serdes.
//        KGroupedStream<String, String> groupedStream1 = source.groupByKey(
//                Grouped.with(
//                        Serdes.String(), /* key */
//                        Serdes.String())     /* value */
//        );


          // TODO Map
//        KStream<String, Integer> transformed = source.map(
//                (key, value) -> KeyValue.pair(value.toLowerCase() + "gggg", value.length()));
//        transformed.to("xyz");
//
//        transformed.print(Printed.toSysOut());


//        KStream<String, String> transformed1 = source.map(
//                (key, value) -> KeyValue.pair(key.toLowerCase() + "gggg", value));
//        transformed1.to("xyz");
//
        // TODO Merge
//        KStream<String, String> merge = source.merge(transformed1);
//        merge.to("xyzd");

        // TODO Peek

        // TODO Print
//        impressionsAndClicks.to("streams-ch-output05");
//        impressionsAndClicks.print(Printed.toSysOut());


        // TODO SelectKey
//        KStream<String, String> rekeyed = source.selectKey((key, value) -> value.split(" ")[0]);


        // TODO 2. Stateful Transformations

        // TODO Aggregate
        // Group records by key.
//        KGroupedStream<String, String> groupedStream = source1.groupByKey();
//        KGroupedStream<String, Long> groupedStream1 = source2.groupByKey();

//        KGroupedTable<String, String> groupedTable = source1.toTable().groupBy(
//                (key, value) -> KeyValue.pair(value, value),
//                Grouped.with(
//                        Serdes.String(), /* key (note: type was modified) */
//                        Serdes.String()) /* value (note: type was modified) */
//  );
//

//        // Aggregating a KGroupedStream (note how the value type changes from String to Long)
//        KTable<String, String> aggregatedStream = groupedStream.aggregate(
//                () -> "haha", /* initializer */
//                // key, previous aggregate value, newly arrived value
//                (aggKey, oldV, newV) -> {
//                    System.out.println(aggKey + "-" + oldV + "-" + newV);
//                    return oldV + newV;
//                }, /* adder */
//                Materialized.as("aggregated-stream-store")
//                ); /* serde for aggregate value */
//
//        source1.toTable();
//
//        // Aggregating a KGroupedTable (note how the value type changes from String to Long)
//        KTable<String, String> aggregatedTable = groupedTable.aggregate(
//                () -> "agest", /* initializer */
//                (aggKey, newValue, aggValue) -> {
//                    return "addr" + aggValue;
//                },
//                (aggKey, oldValue, aggValue) -> {
//                    return "hi" + aggValue;
//                },
//                Materialized.as("aggregated-table-store")); /* serde for aggregate value */


        // Aggregating with time-based windowing (here: with 5-minute tumbling windows)
//        KTable<Windowed<String>, String> timeWindowedAggregatedStream = groupedStream.windowedBy(SlidingWindows.ofTimeDifferenceWithNoGrace(Duration.ofSeconds(5)))
//                .aggregate(
//                        () -> "window hah", /* initializer */
//                        (aggKey, newValue, aggValue) -> {
//                            System.out.println(aggKey + newValue + aggValue);
//                            return aggValue + newValue;
//                        }, /* adder */
//                        Materialized.<String, String, WindowStore<Bytes, byte[]>>as("time-windowed-aggregated-stream-store") /* state store name */
//                                .withValueSerde(Serdes.String())); /* serde for aggregate value */

          // TODO Count
//        groupedStream.count().toStream(Named.as("yijishiwo")).to("yeyyssysyy");

//        groupedStream.count().toStream();
//
//        // Counting a KGroupedStream with time-based windowing (here: with 5-minute tumbling windows)
//        KTable<Windowed<String>, Long> aggregatedStream = groupedStream.windowedBy(
//                        TimeWindows.ofSizeWithNoGrace(Duration.ofMinutes(5))) /* time-based window */
//                .count();


        // TODO Reduce

        // Reducing a KGroupedStream
//        KTable<String, Long> sumKtable = groupedStream1.reduce(
//                // aggValue = running aggregate, newValue = incoming value
//                // sum reducer
//                (aggValue, newValue) -> {
//                    System.out.println( aggValue + " _ " + newValue);
//                    return aggValue + newValue;
//                } /* adder */);
//
//        // Reducing a KGroupedStream
//        KTable<String, Long> maxKtable = groupedStream1.reduce(
//                // aggValue = running aggregate, newValue = incoming value
//                // max reducer
//                (aggValue, newValue) -> {
//                    System.out.println( aggValue + " _ " + newValue);
//                    return Math.max(aggValue, newValue);
//                } /* adder */);
//
//        sumKtable.toStream().to("nihaosum", Produced.with(Serdes.String(), Serdes.Long()));
//        maxKtable.toStream().to("nihaomax", Produced.with(Serdes.String(), Serdes.Long()));

        // Aggregating with time-based windowing (here: with 5-minute tumbling windows)
//        KTable<Windowed<String>, Long> timeWindowedAggregatedStream = groupedStream1.windowedBy(
//                        TimeWindows.ofSizeWithNoGrace(Duration.ofSeconds(10)) /* time-based window */)
//                .reduce(
//                        (aggValue, newValue) -> {
//                            System.out.println(aggValue + " ---" + newValue);
//                            return aggValue + newValue; /* adder */
//                        }
//                );
//
//
//        timeWindowedAggregatedStream.toStream().to("streams-wordcount-output023456u7");


        // TODO Join

        // TODO KStream-KStream Join
        // Java 8+ example, using lambda expressions
//        KStream<String, String> all = left.outerJoin(
//                right,
//                (leftValue, rightValue) -> "left=" + leftValue + ", right=" + rightValue /* ValueJoiner */,
//                // KStream-KStream joins are always windowed joins, hence we must provide a join window.
//                JoinWindows.of(Duration.ofSeconds(5)),
//                // In this specific example, we don't need to define join serdes explicitly because the key, left value, and
//                // right value are all of type String, which matches our default serdes configured for the application.  However,
//                // we want to showcase the use of `StreamJoined.with(...)` in case your code needs a different type setup.
//                StreamJoined.with(
//                        Serdes.String(), /* key */
//                        Serdes.String(), /* left value */
//                        Serdes.String()  /* right value */
//                )
//        );

        // TODO KTable-KTable Equi-Join


        // TODO KTable-KTable Foreign-Key Join

        // The active topology: read two topics as tables and join them on a
        // foreign key extracted from the left value.
        KTable<String, String> foreignLeft = builder.table("foreign-left", Consumed.as("myleft"));
        KTable<String, String> foreignRight = builder.table("foreign-right");

        // Java 8+ example, using lambda expressions
        KTable<String, String> joined = foreignLeft.join(foreignRight,
                // foreign-key extractor: maps each left value to the right table's key
                (leftValue) -> leftValue + "-last",
                (leftValue, rightValue) -> "left=" + leftValue + ", right=" + rightValue /* ValueJoiner */,
                Materialized.as("mytable") // required: names the join's result state store
        );

        joined.toStream().to("wulibiao", Produced.with(Serdes.String(), Serdes.String()));

        // Topics involved in a foreign-key join, summarized:
        // left topic -> state-store-changelog(foreign-left)
        // right topic -> state-store-changelog(foreign-right)
        // left topology -> registration-topic
        // right topology -> response-topic
        // right topology -> state-store-changelog
        // left topology -> ktable-changelog(mytable)
        // left topology -> output topic(wulibiao)


        // TODO KStream-KTable Join

//        final KStream<String, String> impressionsAndClicks = left.join(
//                right,
//                (impressionValue, clickValue) ->{
//                    System.out.println(clickValue + "---" + impressionValue);
//                    return (clickValue == null)? impressionValue + "/not-clicked-yet": impressionValue + "/" + clickValue;
//                },
//                // KStream-KStream joins are always windowed joins, hence we must provide a join window.
//                JoinWindows.of(Duration.ofSeconds(10)),
//                // In this specific example, we don't need to define join serdes explicitly because the key, left value, and
//                // right value are all of type String, which matches our default serdes configured for the application.  However,
//                // we want to showcase the use of `StreamJoined.with(...)` in case your code needs a different type setup.
//                StreamJoined.with(
//                        Serdes.String(), /* key */
//                        Serdes.String(), /* left value */
//                        Serdes.String()  /* right value */
//                )
//        );


        // TODO using Processor API
        final Topology topology = builder.build();
        final KafkaStreams streams = new KafkaStreams(topology, props);
        final CountDownLatch latch = new CountDownLatch(1);

        System.out.println(topology.describe());

        // Attach a shutdown handler to catch Ctrl-C: close the Streams client
        // and release the latch so main can exit cleanly.
        Runtime.getRuntime().addShutdownHook(new Thread("streams-shutdown-hook") {
            @Override
            public void run() {
                streams.close();
                latch.countDown();
            }
        });

        try {
            streams.start();
            // Block until the shutdown hook counts the latch down.
            latch.await();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it, and report
            // the cause before exiting.
            Thread.currentThread().interrupt();
            e.printStackTrace();
            System.exit(1);
        } catch (Throwable throwable) {
            // Never exit silently: surface the failure before terminating.
            throwable.printStackTrace();
            System.exit(1);
        }

        System.exit(0);
    }
}
