package com.gy.flink.project.data.report;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Iterator;
import java.util.Properties;

public class DataReport {
    // SLF4J logger for the whole job; all error reporting goes through it (no printStackTrace).
    final static Logger log = LoggerFactory.getLogger(DataReport.class);
    // Thread-safe formatter for the "yyyy-MM-dd HH:mm:ss" timestamps carried in the "dt" field
    // of the incoming JSON records; cached once as a constant (DateTimeFormatter is immutable).
    final static DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Entry point: consumes cleaned JSON events from the Kafka topic {@code allDataClean},
     * extracts an event-time timestamp plus (area, type) dimensions, counts events per
     * (area, type) in 10-second tumbling event-time windows, prints the aggregates, and
     * diverts records that arrive later than the watermark allows into a side output.
     *
     * @param args unused command-line arguments
     * @throws Exception propagated from {@link StreamExecutionEnvironment#execute(String)}
     */
    public static void main(String[] args) throws Exception {

        String consumerTopic = "allDataClean";
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "centos102:9092");
        properties.put("group.id", "report_uat_002");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        // Parallelism chosen to match the partition count of the source Kafka topic.
        env.setParallelism(3);
        // Checkpoint every minute with exactly-once semantics; allow at most one concurrent
        // checkpoint and at least 10s between them so checkpointing never dominates processing.
        env.enableCheckpointing(60 * 1000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(10 * 1000);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // Externalized checkpoints are discarded on job cancellation (no manual restore intended).
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
//        env.setStateBackend(new RocksDBStateBackend("/flink/RocksDBStateBackend/etl/"));

        // Windows are driven by event time extracted from each record (see watermark()).
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(consumerTopic, new SimpleStringSchema(), properties);
        DataStreamSource<String> dataStream = env.addSource(consumer);

        /**
         * Transformation: parse each JSON record into
         * (epoch-millis event time, area, type, original JSON payload).
         */
        SingleOutputStreamOperator<Tuple4<Long, String, String, String>> transDataStream = dataStream.map(new MapFunction<String, Tuple4<Long, String, String, String>>() {

            @Override
            public Tuple4<Long, String, String, String> map(String value) throws Exception {
                JSONObject originJson = JSON.parseObject(value);
                String dt = originJson.getString("dt");
                String type = originJson.getString("type");
                String area = originJson.getString("area");
                // 0 is a sentinel meaning "timestamp unparseable"; such records are dropped
                // by the filter below (x.f0 != 0), so they never reach the window operator.
                long time = 0;

                try {
                    // "dt" is interpreted as local time in UTC+8 (Beijing time).
                    time = LocalDateTime.parse(dt, dtf).toInstant(ZoneOffset.of("+8")).toEpochMilli();
                } catch (Exception e) {
                    // Log with the failing value and the full stack trace; the record is
                    // filtered out downstream rather than failing the job.
                    log.error("时间解析异常: dt={}", dt, e);
                }
                return Tuple4.of(time, area, type, originJson.toJSONString());
            }
        });
        /**
         * Side-output tag collecting records that arrive after their window has fired
         * (i.e. later than the watermark's 20s out-of-orderness bound permits).
         */
        OutputTag<Tuple4<Long, String, String, String>> outputTag = new OutputTag<Tuple4<Long, String, String, String>>("late-date") {
        };


        /**
         * Filter: drop records whose timestamp failed to parse (sentinel 0) or whose
         * area dimension is missing.
         */
        SingleOutputStreamOperator<Tuple4<Long, String, String, String>> filterStream = transDataStream.filter(x -> x.f0 != 0 && x.f1 != null);

        // Key by (area, type), count per 10s tumbling event-time window; late records go
        // to the side output instead of being silently discarded.
        SingleOutputStreamOperator<Tuple4<String, String, String, Long>> resultStream = filterStream.assignTimestampsAndWatermarks(watermark())
                .keyBy(1, 2)
                .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .sideOutputLateData(outputTag)
                .apply(apply());


        /**
         * Main result handling (placeholder: print the per-window aggregates).
         */
        resultStream.print();

        /**
         * Late-data handling (placeholder sink: would forward late records to Kafka).
         */
        resultStream.getSideOutput(outputTag)
                .addSink(new SinkFunction<Tuple4<Long, String, String, String>>() {
                    @Override
                    public void invoke(Tuple4<Long, String, String, String> value, Context context) throws Exception {
                        System.out.println("发送到kafka:" + value);
                    }
                });


        env.execute(DataReport.class.getSimpleName());
    }

    /**
     * Builds the window function that counts the records of each (area, type) key inside
     * one tumbling window and emits (window-end formatted as "yyyy-MM-dd HH:mm:ss",
     * area, type, count).
     *
     * @return the counting {@link WindowFunction} applied after {@code keyBy(1, 2)}
     */
    private static WindowFunction<Tuple4<Long, String, String, String>, Tuple4<String, String, String, Long>, Tuple, TimeWindow> apply() {
        return new WindowFunction<Tuple4<Long, String, String, String>, Tuple4<String, String, String, Long>, Tuple, TimeWindow>() {
            @Override
            public void apply(Tuple tuple, TimeWindow window, Iterable<Tuple4<Long, String, String, String>> input, Collector<Tuple4<String, String, String, Long>> out) throws Exception {
                // Key fields in keyBy(1, 2) order: field 0 = area, field 1 = type.
                String area = tuple.getField(0).toString();
                String type = tuple.getField(1).toString();

                // Count the window's elements; the payloads themselves are not needed.
                Iterator<Tuple4<Long, String, String, String>> it = input.iterator();
                long count = 0L;
                while (it.hasNext()) {
                    it.next();
                    count++;
                }
                // Window end is rendered in the JVM's default zone — assumed to be UTC+8
                // to match the parsing side; confirm on deployment hosts.
                out.collect(Tuple4.of(LocalDateTime.ofInstant(Instant.ofEpochMilli(window.getEnd()), ZoneId.systemDefault()).format(dtf), area, type, count));

            }

        };
    }


    /**
     * Builds a periodic watermark assigner that tracks the maximum event time seen so far
     * and emits watermarks lagging it by 20 seconds, i.e. tolerates up to 20s of
     * out-of-order arrival before records are routed to the late-data side output.
     *
     * @return the periodic watermark assigner for the transformed stream
     */
    private static AssignerWithPeriodicWatermarks<Tuple4<Long, String, String, String>> watermark() {
        return new AssignerWithPeriodicWatermarks<Tuple4<Long, String, String, String>>() {

            long currMaxTime = 0L;
            final long maxOutTime = 20 * 1000; // maximum tolerated out-of-orderness (ms)

            @Override
            public long extractTimestamp(Tuple4<Long, String, String, String> element, long previousElementTimestamp) {
                // Primitive long avoids the redundant box/unbox of the original Long local.
                long timeStamp = element.f0;
                currMaxTime = Math.max(timeStamp, currMaxTime);
                return timeStamp;
            }

            @Nullable
            @Override
            public Watermark getCurrentWatermark() {
                // Watermark trails the highest timestamp seen by the allowed lateness.
                return new Watermark(currMaxTime - maxOutTime);
            }
        };
    }
}
