package cn._51doit.live.jobs;

import cn._51doit.live.pojo.OrderDetail;
import cn._51doit.live.pojo.OrderMain;
import cn._51doit.live.udf.JsonToOrderDetailFunction;
import cn._51doit.live.udf.JsonToOrderMainFunction;
import cn._51doit.live.utils.FlinkUtilsV2;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.CoGroupFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.Duration;

/**
 * 使用Flink实现多流join
 * <p>
 * 订单主表和订单明细表进行join
 * 1.使用什么样类型的时间进行join（EventTime）
 * 2.划分什么样类型的窗口（滚动窗口）
 * 3.使用什么样的方式进行join（LeftOuterJoin）
 * 4.数据如果迟到了怎么办？数据没join上怎么办？
 */
public class OrderCount {

    /**
     * Window length shared by the pre-join detail window and the coGroup join
     * window. The two MUST be the same type and size: a record that is late for
     * the first window would also be late (and silently dropped) in the join
     * window, which is what makes the side-output trick below sound.
     */
    private static final Time WINDOW_SIZE = Time.seconds(5);

    /**
     * Entry point. Reads order-main and order-detail records from Kafka,
     * performs an event-time left-outer join (detail table on the left),
     * rescues late detail records via a side output, and unions them back in
     * so they share the "missing main → DB lookup" path with un-joined records.
     *
     * @param args args[0] is the path to a properties file containing at least
     *             {@code order.main.topics} and {@code order.detail.topics}
     * @throws Exception if config loading or job execution fails
     */
    public static void main(String[] args) throws Exception {

        // Fail fast with a clear message instead of ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: OrderCount <config.properties>");
        }
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);
        String orderMainTopics = parameterTool.getRequired("order.main.topics");
        String orderDetailTopics = parameterTool.getRequired("order.detail.topics");

        // Read the order-main table from Kafka and parse each JSON line into a bean.
        DataStream<String> mainStream = FlinkUtilsV2.createKafkaStream(parameterTool, orderMainTopics, SimpleStringSchema.class);
        SingleOutputStreamOperator<OrderMain> orderMainStream = mainStream.process(new JsonToOrderMainFunction());

        // Read the order-detail table from Kafka and parse each JSON line into a bean.
        DataStream<String> detailStream = FlinkUtilsV2.createKafkaStream(parameterTool, orderDetailTopics, SimpleStringSchema.class);
        SingleOutputStreamOperator<OrderDetail> orderDetailStream = detailStream.process(new JsonToOrderDetailFunction());

        // Event time is each record's update_time; zero tolerated out-of-orderness,
        // so anything behind the watermark is "late" and handled explicitly below.
        SingleOutputStreamOperator<OrderMain> orderMainWithWaterMark = orderMainStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<OrderMain>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(new SerializableTimestampAssigner<OrderMain>() {
                            @Override
                            public long extractTimestamp(OrderMain element, long recordTimestamp) {
                                return element.getUpdate_time().getTime();
                            }
                        }));

        SingleOutputStreamOperator<OrderDetail> orderDetailWithWaterMark = orderDetailStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<OrderDetail>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(new SerializableTimestampAssigner<OrderDetail>() {
                            @Override
                            public long extractTimestamp(OrderDetail element, long recordTimestamp) {
                                return element.getUpdate_time().getTime();
                            }
                        }));

        // Tag for detail records that arrive after their window has already fired.
        OutputTag<OrderDetail> lateDetailTag = new OutputTag<OrderDetail>("late-data") {};

        // Pre-window the detail stream with the SAME window as the join below.
        // coGroup windows cannot expose late data directly, so this pass-through
        // window exists only to divert late details to the side output before
        // the join would silently drop them.
        SingleOutputStreamOperator<OrderDetail> orderDetailStreamWithLateData = orderDetailWithWaterMark
                .keyBy(d -> d.getOrder_id())
                .window(TumblingEventTimeWindows.of(WINDOW_SIZE))
                .sideOutputLateData(lateDetailTag)
                .apply(new WindowFunction<OrderDetail, OrderDetail, Long, TimeWindow>() {
                    @Override
                    public void apply(Long orderId, TimeWindow window, Iterable<OrderDetail> input, Collector<OrderDetail> out) throws Exception {
                        // Pure pass-through: the window exists only to detect lateness.
                        for (OrderDetail orderDetail : input) {
                            out.collect(orderDetail);
                        }
                    }
                });

        // Late details have no join partner; pair them with a null main so they
        // flow through the same "lookup missing main" path as un-joined records.
        DataStream<OrderDetail> orderDetailLateStream = orderDetailStreamWithLateData.getSideOutput(lateDetailTag);
        SingleOutputStreamOperator<Tuple2<OrderDetail, OrderMain>> lateTupleStream = orderDetailLateStream.map(new MapFunction<OrderDetail, Tuple2<OrderDetail, OrderMain>>() {
            @Override
            public Tuple2<OrderDetail, OrderMain> map(OrderDetail detail) throws Exception {
                return Tuple2.of(detail, null);
            }
        });

        // Left-outer join with the detail table as the LEFT side: details are the
        // numerous side, and only the un-joined ones need a fallback DB lookup,
        // which keeps that lookup volume small.
        DataStream<Tuple2<OrderDetail, OrderMain>> joinedStream = orderDetailWithWaterMark.coGroup(orderMainWithWaterMark)
                .where(d -> d.getOrder_id())
                .equalTo(m -> m.getOid())
                .window(TumblingEventTimeWindows.of(WINDOW_SIZE))
                .apply(new CoGroupFunction<OrderDetail, OrderMain, Tuple2<OrderDetail, OrderMain>>() {
                    @Override
                    public void coGroup(Iterable<OrderDetail> details, Iterable<OrderMain> mains, Collector<Tuple2<OrderDetail, OrderMain>> out) throws Exception {
                        for (OrderDetail orderDetail : details) {
                            // Reset per detail record. (Previously the flag was
                            // declared outside the loop and never reset — correct
                            // only by accident because all records in one coGroup
                            // call share the same key.)
                            boolean joined = false;
                            for (OrderMain orderMain : mains) {
                                joined = true;
                                out.collect(Tuple2.of(orderDetail, orderMain));
                            }
                            if (!joined) {
                                // Left outer join: emit the detail with a null main.
                                out.collect(Tuple2.of(orderDetail, null));
                            }
                        }
                    }
                });

        // Union the join output with the rescued late details, then backfill any
        // missing OrderMain.
        SingleOutputStreamOperator<Tuple2<OrderDetail, OrderMain>> res = joinedStream.union(lateTupleStream)
                .map(new RichMapFunction<Tuple2<OrderDetail, OrderMain>, Tuple2<OrderDetail, OrderMain>>() {
                    @Override
                    public Tuple2<OrderDetail, OrderMain> map(Tuple2<OrderDetail, OrderMain> tp) throws Exception {
                        if (tp.f1 == null) {
                            // TODO: query the production database for the OrderMain
                            // matching tp.f0's order id; this placeholder only keeps
                            // the pipeline flowing.
                            tp.f1 = new OrderMain();
                        }
                        return tp;
                    }
                });

        // TODO: multi-dimensional aggregation over `res` (e.g. via Flink SQL).

        FlinkUtilsV2.env.execute();
    }
}
