package org.kafka.stream;

import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KGroupedStream;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.ValueMapper;
import org.apache.kafka.streams.state.KeyValueStore;

public class kafkaStream {

    /**
     * Entry point: builds and starts a pass-through Kafka Streams topology that
     * reads String records from topic {@code test-in}, prints each key/value pair
     * to stdout, and forwards the records unchanged to topic {@code test-out}.
     *
     * <p>Uses {@code MyTimestampExtractor} (project-local) for record timestamps
     * and advertises {@code localhost:9999} as this instance's application server.
     *
     * @param args command-line arguments (unused)
     * @throws Exception declared for setup code; not thrown by the current body
     */
    public static void main(final String[] args) throws Exception {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "kafkaStream-test");
        // Fixed: the original listed "localhost:9093" twice — a bootstrap list
        // should name each broker once.
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9093");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, MyTimestampExtractor.class.getName());
        props.put(StreamsConfig.APPLICATION_SERVER_CONFIG, "localhost:9999");

        StreamsBuilder builder = new StreamsBuilder();
        // Explicit serdes on the source node for symmetry with the sink below;
        // behavior is identical to relying on the String defaults configured above.
        KStream<String, String> stream = builder.stream(
            "test-in",
            Consumed.with(Serdes.String(), Serdes.String())
        );

        // Debug node: print every record flowing through the topology.
        stream.foreach((key, value) -> {
            System.out.println("key = [" + key + "]" + "value = [" + value + "]");
        });

        // Sink node: forward records unchanged to the output topic.
        stream.to("test-out", Produced.with(Serdes.String(), Serdes.String()));

        Topology topology = builder.build();
        System.out.println("topology = [" + topology.describe() + "]");

        KafkaStreams kafkaStreams = new KafkaStreams(topology, props);
        // Fixed: the original never closed the streams instance. Closing on JVM
        // shutdown releases the consumer-group membership and local state cleanly.
        Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close, "streams-shutdown-hook"));
        kafkaStreams.start();
    }

}