package com.bawei.flink.adsstatic;

import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import scala.Int;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Properties;

/**
 * Flink streaming job: city-level ad PV Top-1.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Consume CSV lines {@code "ts,area,city,userId,adsId"} from Kafka topic {@code ads_log}.</li>
 *   <li>Assign event-time timestamps from the first field with 5s bounded out-of-orderness.</li>
 *   <li>Count clicks per (city, adsId) in 1-minute tumbling event-time windows.</li>
 *   <li>Re-key by (windowStart, city) and, once the window's end has passed, emit the
 *       single ad with the highest count for that city/window to Kudu.</li>
 * </ol>
 */
public class CItyPVTopN {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(10000);
        env.setParallelism(1);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);

        // make sure 500 ms of progress happen between checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);

        // checkpoints have to complete within one minute, or are discarded
        env.getCheckpointConfig().setCheckpointTimeout(60000);

        // only two consecutive checkpoint failures are tolerated
        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(2);

        // allow only one checkpoint to be in progress at the same time
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);

        // enable externalized checkpoints which are retained after job cancellation
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // enables the experimental unaligned checkpoints
        //env.getCheckpointConfig().enableUnalignedCheckpoints();

        // sets the checkpoint storage where checkpoint snapshots will be written
        env.getCheckpointConfig().setCheckpointStorage("hdfs://cdh1:8020/flinkcheckpoints");

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "cdh1:9092,cdh2:9092,cdh3:9092");
        properties.setProperty("group.id", "ads_flink_consumer");
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>("ads_log", new SimpleStringSchema(), properties);

        // Offsets are committed back to Kafka on successful checkpoints only.
        consumer.setCommitOffsetsOnCheckpoints(true);
        consumer.setStartFromEarliest();

        DataStreamSource<String> lines = env.addSource(consumer);

        // Parse each CSV line into Tuple5<ts, area, city, userId, adsId>.
        // Malformed records (wrong field count or non-numeric fields) are dropped.
        SingleOutputStreamOperator<Tuple5<Long, String, String, Integer, Integer>> flated = lines.flatMap(new FlatMapFunction<String, Tuple5<Long, String, String, Integer, Integer>>() {
            @Override
            public void flatMap(String value, Collector<Tuple5<Long, String, String, Integer, Integer>> out) throws Exception {
                String[] arr = value.split(",");
                if (arr.length == 5) {
                    try {
                        Long ts = Long.valueOf(arr[0]);
                        String area = arr[1];
                        String city = arr[2];
                        Integer userId = Integer.valueOf(arr[3]);
                        Integer adsId = Integer.valueOf(arr[4]);

                        out.collect(Tuple5.of(ts, area, city, userId, adsId));
                    } catch (NumberFormatException ignored) {
                        // Best-effort parsing: skip records whose numeric fields are garbled
                        // instead of failing the whole job.
                    }
                }
            }
        });

        // Event time comes from the record's first field; tolerate up to 5 seconds of
        // out-of-order arrival before the watermark passes an event.
        SingleOutputStreamOperator<Tuple5<Long, String, String, Integer, Integer>> flatedWithWaterMarker = flated.assignTimestampsAndWatermarks(WatermarkStrategy.<Tuple5<Long, String, String, Integer, Integer>>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                .withTimestampAssigner(new SerializableTimestampAssigner<Tuple5<Long, String, String, Integer, Integer>>() {
                    @Override
                    public long extractTimestamp(Tuple5<Long, String, String, Integer, Integer> element, long recordTimestamp) {
                        return element.f0;
                    }
                }));

        // Count clicks per (city, adsId) in 1-minute tumbling windows.
        // Output: Tuple5<windowStartFormatted, windowEndMillis, city, adsId, count>.
        SingleOutputStreamOperator<Tuple5<String, Long,String, Integer, Long>> processed = flatedWithWaterMarker.keyBy(new KeySelector<Tuple5<Long, String, String, Integer, Integer>, Tuple2<String, Integer>>() {
            @Override
            public Tuple2<String, Integer> getKey(Tuple5<Long, String, String, Integer, Integer> value) throws Exception {
                return Tuple2.of(value.f2, value.f4);
            }
        })
                .window(TumblingEventTimeWindows.of(Time.minutes(1)))
                .process(new ProcessWindowFunction<Tuple5<Long, String, String, Integer, Integer>, Tuple5<String, Long,String, Integer, Long>, Tuple2<String, Integer>, TimeWindow>() {

                    // SimpleDateFormat is not thread-safe, but each operator instance is
                    // single-threaded in Flink, so a per-instance formatter is safe here.
                    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

                    @Override
                    public void process(Tuple2<String, Integer> key, Context context, Iterable<Tuple5<Long, String, String, Integer, Integer>> elements, Collector<Tuple5<String, Long,String, Integer, Long>> out) throws Exception {
                        // Use a primitive accumulator to avoid boxing on every element.
                        long sum = 0L;
                        for (Tuple5<Long, String, String, Integer, Integer> element : elements) {
                            sum += 1L;
                        }
                        String startTime = sdf.format(context.window().getStart());
                        long endTime = context.window().getEnd();
                        out.collect(Tuple5.of(startTime, endTime, key.f0, key.f1, sum));
                    }
                });

        // Re-key by (windowStart, city); buffer all (adsId, count) rows for that key in
        // list state and fire an event-time timer just after the window end to pick the
        // top ad. Output: Tuple5<windowStartFormatted, windowEndFormatted, city, adsId, count>.
        SingleOutputStreamOperator<Tuple5<String, String,String, Integer, Long>> res = processed.keyBy(new KeySelector<Tuple5<String, Long, String, Integer, Long>, Tuple2<String, String>>() {
            @Override
            public Tuple2<String, String> getKey(Tuple5<String, Long, String, Integer, Long> value) throws Exception {
                return Tuple2.of(value.f0, value.f2);
            }
        }).process(new KeyedProcessFunction<Tuple2<String, String>, Tuple5<String, Long, String, Integer, Long>, Tuple5<String, String,String, Integer, Long>>() {

            private transient ListState<Tuple5<String, Long, String, Integer, Long>> list;
            private transient SimpleDateFormat sdf;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Parameterized descriptor (the original used a raw type, which compiles
                // with an unchecked warning but defeats type checking).
                ListStateDescriptor<Tuple5<String, Long, String, Integer, Long>> listStateDescriptor =
                        new ListStateDescriptor<>("liststate",
                                TypeInformation.of(new TypeHint<Tuple5<String, Long, String, Integer, Long>>() {
                                })
                        );

                sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

                list = getRuntimeContext().getListState(listStateDescriptor);
            }

            @Override
            public void processElement(Tuple5<String, Long, String, Integer, Long> value, Context ctx, Collector<Tuple5<String, String,String, Integer, Long>> out) throws Exception {
                list.add(value);
                // All elements for this key share the same window end (f1), so duplicate
                // registrations collapse into a single timer at windowEnd + 1.
                ctx.timerService().registerEventTimeTimer(value.f1 + 1);
            }

            @Override
            public void onTimer(long timestamp, OnTimerContext ctx, Collector<Tuple5<String, String,String, Integer, Long>> out) throws Exception {
                Iterator<Tuple5<String, Long, String, Integer, Long>> it = list.get().iterator();
                ArrayList<Tuple5<String, Long, String, Integer, Long>> arrList = new ArrayList<>();
                while (it.hasNext()) {
                    arrList.add(it.next());
                }
                // FIX: clear the per-key state once drained. The key contains the window
                // start time, so every window creates a fresh key; without this clear()
                // the buffered rows were retained forever and state grew without bound.
                list.clear();

                if (!arrList.isEmpty()) {
                    // Descending by click count; element 0 is the city's top ad.
                    arrList.sort(new Comparator<Tuple5<String, Long, String, Integer, Long>>() {
                        @Override
                        public int compare(Tuple5<String, Long, String, Integer, Long> o1, Tuple5<String, Long, String, Integer, Long> o2) {
                            return o2.f4.compareTo(o1.f4);
                        }
                    });

                    Tuple5<String, Long, String, Integer, Long> top = arrList.get(0);
                    String endTime = sdf.format(top.f1);
                    out.collect(Tuple5.of(top.f0, endTime, top.f2, top.f3, top.f4));
                }
            }
        });

        res.addSink(new KuduSinkFunction());

        env.execute("staticCityPV");
    }

}
