package com.flink.ttest.flinkj;

import com.alibaba.fastjson.JSON;
import com.flink.ttest.cofig.Order;
import com.flink.ttest.cofig.OrderAggre;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.eventtime.*;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousEventTimeTrigger;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.math.BigDecimal;
import java.time.Duration;

/**
 * Windowed per-user order aggregation over a Kafka stream.
 *
 * <p>Reads JSON-encoded {@code Order} records from Kafka, assigns event-time
 * watermarks from {@code Order.orderDate}, and aggregates per user over
 * 10-second tumbling event-time windows using {@link OrderAggre}.
 *
 * <p>NOTE: keep the job/source parallelism equal to the Kafka topic's
 * partition count — otherwise idle subtasks hold back the watermark and the
 * windows may never fire (partially mitigated here by {@code withIdleness}).
 *
 * @author LCG
 * @date 2022-07-01 12:04:44
 */
public class TestWindowStream {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Enable checkpointing; with the Kafka source this is what drives
        // offset commits back to Kafka (no checkpoint -> no commit).
        env.enableCheckpointing(60000, CheckpointingMode.EXACTLY_ONCE);
        env.setParallelism(4);
        // Interval (ms) at which periodic watermarks are emitted.
        env.getConfig().setAutoWatermarkInterval(3000);

        // Kafka source, new unified connector API.
        KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
                .setBootstrapServers("47.94.152.49:9092")
                .setGroupId("consumer1")
                .setTopics("TEST-TOPIC")
                // Start from the latest offsets (no replay of historical data).
                .setStartingOffsets(OffsetsInitializer.latest())
                .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
                .build();

        // Watermark strategy: tolerate 20s of out-of-orderness, event time taken
        // from Order.orderDate. withIdleness stops an idle partition from
        // stalling watermark progress for the whole job.
        WatermarkStrategy<String> watermarkStrategy = WatermarkStrategy
                .<String>forBoundedOutOfOrderness(Duration.ofSeconds(20))
                .withIdleness(Duration.ofSeconds(20))
                .withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                    @Override
                    public long extractTimestamp(String str, long previousTimestamp) {
                        // NOTE(review): a record that fails to parse here still
                        // fails the job — consider validating upstream.
                        Order order = JSON.parseObject(str, Order.class);
                        return order.orderDate;
                    }
                });

        // Attach the Kafka source with the watermark strategy.
        DataStreamSource<String> orderStream =
                env.fromSource(kafkaSource, watermarkStrategy, "my-window-job");
        // Keep the source parallelism equal to the topic's partition count (4).
        orderStream.setParallelism(4);

        // Map each order to (userId, count=1, price). Records with a missing or
        // unparsable price are skipped instead of failing the whole job.
        SingleOutputStreamOperator<Tuple3<String, Long, BigDecimal>> orderTuples =
                orderStream.flatMap(new FlatMapFunction<String, Tuple3<String, Long, BigDecimal>>() {
                    @Override
                    public void flatMap(String s, Collector<Tuple3<String, Long, BigDecimal>> collector) throws Exception {
                        try {
                            Order order = JSON.parseObject(s, Order.class);
                            collector.collect(Tuple3.of(order.userId, 1L,
                                    new BigDecimal(order.getOrderPrice().toString())));
                        } catch (RuntimeException e) {
                            // Best-effort stream: log and drop the bad record.
                            System.err.println("Skipping malformed order record: " + e.getMessage());
                        }
                    }
                });

        // Per-user 10s tumbling event-time windows, allowing 10s of lateness
        // before a window's state is discarded.
        SingleOutputStreamOperator<Tuple3<String, Long, BigDecimal>> aggregated =
                orderTuples
                        .keyBy(t -> t.f0)
                        .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                        .allowedLateness(Time.seconds(10))
                        .aggregate(new OrderAggre());

        // Debug sink: print each window result to stdout.
        aggregated.addSink(new SinkFunction<Tuple3<String, Long, BigDecimal>>() {
            @Override
            public void invoke(Tuple3<String, Long, BigDecimal> value, Context context) throws Exception {
                System.out.println(value);
            }
        });

        env.execute("job-XXXXXXXXXX^_^");
    }
}
