package com.boot.stream;

import com.alibaba.fastjson.JSONObject;
import com.boot.model.Statistics;
import com.boot.stream.codec.JsonDeserializer;
import com.boot.stream.codec.JsonSerializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.*;
import org.apache.kafka.streams.kstream.internals.WindowedSerializer;
import org.apache.kafka.streams.state.WindowStore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.math.BigDecimal;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;

@RunWith(SpringRunner.class)
@SpringBootTest
public class SteamsDemo {

    /** Tumbling-window length, in seconds (5 minutes). */
    private static final int WINDOW_SIZE = 300;

    /** Window-start timestamp format; thread-safe, so cached once instead of rebuilt per record. */
    private static final DateTimeFormatter WINDOW_FORMAT = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");

    /** Kafka Streams client configuration supplied by the Spring context. */
    @Autowired
    @Qualifier("streamProperties")
    private Properties properties;

    /**
     * Builds and runs a Kafka Streams topology that:
     * <ol>
     *   <li>consumes JSON strings from topic {@code api-charge-stream},</li>
     *   <li>re-keys every record by the {@code key} field of its JSON payload,</li>
     *   <li>aggregates per-key {@link Statistics} (call count, success count/rate,
     *       total/average time, fee sum) over {@value #WINDOW_SIZE}-second tumbling windows,</li>
     *   <li>publishes the windowed aggregates to topic {@code steams-res}.</li>
     * </ol>
     * Blocks on a latch until the JVM receives a shutdown signal, then closes the streams app.
     */
    @Test
    public void main2() {
        // Serde for the Statistics aggregate. The JSON serializer/deserializer pair
        // discovers the target class via the "JsonModelClass" config entry.
        Map<String, Object> serdeProps = new HashMap<>();
        final Serializer<Statistics> statisticsSerializer = new JsonSerializer<>();
        serdeProps.put("JsonModelClass", Statistics.class);
        statisticsSerializer.configure(serdeProps, false);

        final Deserializer<Statistics> statisticsDeserializer = new JsonDeserializer<>("JsonModelClass");
        statisticsDeserializer.configure(serdeProps, false);

        final Serde<Statistics> statisticsSerde = Serdes.serdeFrom(statisticsSerializer, statisticsDeserializer);

        StreamsBuilder builder = new StreamsBuilder();

        // Re-key every record by the "key" field of its JSON payload, then group by that key.
        KStream<String, String> source = builder.stream("api-charge-stream");
        KGroupedStream<String, String> keyStream = source.selectKey((k, v) -> {
            JSONObject json = JSONObject.parseObject(v);
            return json.getString("key");
        }).groupByKey();

        // DSL quick reference (condensed from the original notes):
        // (1) branch        - split one stream into several streams
        // (2) map/mapValues - transform one record into another record
        // (3) flatMap       - expand one record into 0..n records (key and value)
        // (4) flatMapValues - expand one value into 0..n records with the same key
        // (5) merge         - stream1.merge(stream2)
        // (6) peek          - debugging probe; does not affect downstream data
        // (7) selectKey     - replace the record key: (k, v) -> (newKey, v)

        KStream<Windowed<String>, Statistics> win = keyStream
                .windowedBy(TimeWindows.of(Duration.ofSeconds(WINDOW_SIZE)))
                // The aggregation input may be a KStream or a KTable,
                // but the aggregation output is always a KTable.
                .aggregate(
                        Statistics::new,
                        (key, value, s) -> {
                            JSONObject json = JSONObject.parseObject(value);
                            System.out.println("原始数据:" + value);
                            s.setCount(s.getCount() + 1);
                            int status = json.getInteger("status");
                            int time = json.getIntValue("time");
                            String fee = json.getString("fee");
                            // NOTE(review): a call is counted as successful when status != 0.
                            // Confirm this convention against the producer — many APIs use
                            // status == 0 to mean success, in which case this is inverted.
                            if (status != 0) {
                                s.setSuccess(s.getSuccess() + 1);
                            }
                            s.setSuccessRate(s.getSuccess() * 1.00 / s.getCount());
                            s.setTotalTime(s.getTotalTime() + time);
                            // NOTE(review): truncating division if totalTime/count are integral — confirm intended.
                            s.setAvgTime(s.getTotalTime() / s.getCount());
                            s.setSum(s.getSum().add(new BigDecimal(fee)));
                            return s;
                        },
                        Materialized.<String, Statistics, WindowStore<Bytes, byte[]>>as("time-windowed-aggregated-store")
                                .withValueSerde(statisticsSerde))
                .toStream()
                // Copy the grouping key and the window start time (yyyyMMddHHmmss,
                // system zone) into the value so downstream consumers can see them.
                .map((key, value) -> {
                    value.setKey(key.key());
                    value.setWindow(Long.parseLong(
                            LocalDateTime.ofInstant(key.window().startTime(), ZoneId.systemDefault())
                                    .format(WINDOW_FORMAT)));
                    return new KeyValue<>(key, value);
                });

        // Windowed-key serde for the output topic. TimeWindowedDeserializer takes the
        // window size in MILLISECONDS; the original passed WINDOW_SIZE (seconds) directly,
        // which made consumer-side window end times 1000x too small.
        WindowedSerializer<String> windowedSerializer = new TimeWindowedSerializer<>(Serdes.String().serializer());
        Deserializer<Windowed<String>> windowedDeserializer =
                new TimeWindowedDeserializer<>(Serdes.String().deserializer(), Duration.ofSeconds(WINDOW_SIZE).toMillis());
        Serde<Windowed<String>> windowedSerde = Serdes.serdeFrom(windowedSerializer, windowedDeserializer);
        win.to("steams-res", Produced.with(windowedSerde, statisticsSerde));

        Topology topology = builder.build();
        final KafkaStreams streams = new KafkaStreams(topology, this.properties);
        System.out.println(topology.describe());

        // Keep the application alive until Ctrl-C / JVM shutdown, then close cleanly.
        final CountDownLatch latch = new CountDownLatch(1);
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            streams.close();
            latch.countDown();
        }));

        try {
            streams.start();
            latch.await();
        } catch (Throwable e) {
            e.printStackTrace(); // surface the failure instead of exiting silently
            System.exit(1);
        }
        System.exit(0);
    }
}
