package com.flink.ttest.flinkj;

import com.alibaba.fastjson.JSON;
import com.flink.ttest.cofig.Order;
import com.sun.xml.internal.ws.api.policy.SourceModel;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.eventtime.*;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.shaded.jackson2.org.yaml.snakeyaml.events.Event;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.runtime.operators.util.AssignerWithPeriodicWatermarksAdapter;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.math.BigDecimal;
import java.time.Duration;
import java.time.temporal.TemporalUnit;
import java.util.Map;
import java.util.concurrent.TimeUnit;

/**
 * Bounded (windowed) stream computation demo. Window types in Flink:
 * tumbling windows, sliding windows, and session windows.
 *
 * @author LCG
 * @since 2022-06-30
 **/
public class WindowFlinkStream {

    // Kafka connection defaults; each can be overridden via a CLI argument (see main).
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "47.94.152.49:9092";
    private static final String DEFAULT_TOPIC = "TEST-TOPIC";
    private static final String DEFAULT_GROUP_ID = "consumer1";

    // How far (ms) the watermark lags behind the maximum observed event time,
    // i.e. how much out-of-orderness / lateness the job tolerates before a
    // window is allowed to fire.
    private static final long MAX_OUT_OF_ORDERNESS_MS = 30_000L;

    /**
     * Job entry point. Consumes JSON-encoded {@code Order} records from Kafka,
     * assigns event-time timestamps/watermarks from {@code Order.orderDate},
     * and aggregates per-user order count and total price over 10-second
     * tumbling event-time windows, printing each window result to stdout.
     *
     * @param args optional overrides: args[0] = Kafka bootstrap servers,
     *             args[1] = topic, args[2] = consumer group id; defaults are
     *             used for any argument not supplied
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {

        String bootstrapServers = args.length > 0 ? args[0] : DEFAULT_BOOTSTRAP_SERVERS;
        String topic = args.length > 1 ? args[1] : DEFAULT_TOPIC;
        String groupId = args.length > 2 ? args[2] : DEFAULT_GROUP_ID;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 60 s with exactly-once guarantees.
        env.enableCheckpointing(60000, CheckpointingMode.EXACTLY_ONCE);
        env.setParallelism(1);
        // NOTE: event time is the default time characteristic since Flink 1.12,
        // so no setStreamTimeCharacteristic(TimeCharacteristic.EventTime) call
        // is needed here.

        // Kafka source defined via the new unified connector API.
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers(bootstrapServers)
                .setGroupId(groupId)
                .setTopics(topic)
                // Start consuming from the latest offsets.
                .setStartingOffsets(OffsetsInitializer.latest())
                .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
                .build();

        // Watermark strategy. Event-time windows never fire without watermarks:
        // if no watermark is generated, late/out-of-order data produces no
        // output. The generator tracks the maximum event timestamp seen so far
        // and periodically emits (max - MAX_OUT_OF_ORDERNESS_MS), holding
        // windows open for late records. This is equivalent to the built-in
        // WatermarkStrategy.forBoundedOutOfOrderness(Duration.ofSeconds(30))
        // but spelled out for demonstration.
        WatermarkStrategy<String> watermarkStrategy = WatermarkStrategy.forGenerator(new WatermarkGeneratorSupplier<String>() {
            @Override
            public WatermarkGenerator<String> createWatermarkGenerator(Context context) {
                return new WatermarkGenerator<String>() {

                    // Highest event time observed so far.
                    private long maxTimeStamp = 0L;

                    @Override
                    public void onEvent(String s, long l, WatermarkOutput watermarkOutput) {
                        Order order = JSON.parseObject(s, Order.class);
                        maxTimeStamp = Math.max(maxTimeStamp, order.orderDate);
                    }

                    @Override
                    public void onPeriodicEmit(WatermarkOutput watermarkOutput) {
                        // Lag the watermark to wait for late, out-of-order records.
                        watermarkOutput.emitWatermark(new Watermark(maxTimeStamp - MAX_OUT_OF_ORDERNESS_MS));
                    }
                };
            }
        }).withTimestampAssigner(new SerializableTimestampAssigner<String>() {
            @Override
            public long extractTimestamp(String str, long l) {
                // Event time comes from the order's own timestamp field.
                return JSON.parseObject(str, Order.class).orderDate;
            }
        });

        // Attach the Kafka source together with the watermark strategy.
        DataStreamSource<String> orderJsonStream =
                env.fromSource(kafkaSource, watermarkStrategy, "My--Kafka Source");

        // Parse each JSON record into (userId, count = 1, orderPrice).
        SingleOutputStreamOperator<Tuple3<String, Long, BigDecimal>> orderTuples =
                orderJsonStream.flatMap(new FlatMapFunction<String, Tuple3<String, Long, BigDecimal>>() {
                    @Override
                    public void flatMap(String s, Collector<Tuple3<String, Long, BigDecimal>> collector) throws Exception {
                        Order order = JSON.parseObject(s, Order.class);
                        // Build the BigDecimal from the String representation to
                        // avoid binary floating-point rounding on money values.
                        collector.collect(Tuple3.of(order.userId, 1L, new BigDecimal(order.getOrderPrice().toString())));
                    }
                });

        // Per-user order count (f1) and total price (f2) over 10-second
        // tumbling event-time windows.
        SingleOutputStreamOperator<Tuple3<String, Long, BigDecimal>> reduced =
                orderTuples.keyBy(t -> t.f0)
                        .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                        .reduce(new ReduceFunction<Tuple3<String, Long, BigDecimal>>() {
                            @Override
                            public Tuple3<String, Long, BigDecimal> reduce(Tuple3<String, Long, BigDecimal> acc, Tuple3<String, Long, BigDecimal> next) throws Exception {
                                // Return a fresh tuple instead of mutating an input:
                                // mutating reduce inputs is unsafe when Flink's
                                // object reuse is enabled.
                                Tuple3<String, Long, BigDecimal> result =
                                        Tuple3.of(next.f0, acc.f1 + next.f1, acc.f2.add(next.f2));
                                System.out.println("==========数据计算===》" + result);
                                return result;
                            }
                        });

        // Demo sinks: a hand-rolled println sink plus the built-in print()
        // shortcut — each window result is emitted to stdout twice.
        reduced.addSink(new SinkFunction<Tuple3<String, Long, BigDecimal>>() {
            @Override
            public void invoke(Tuple3<String, Long, BigDecimal> value, Context context) throws Exception {
                System.out.println(value);
            }
        });

        reduced.print();

        env.execute("My JOb");
    }

}
