package cn._51doit.live.jobs;

import cn._51doit.live.pojo.OrderDetail;
import cn._51doit.live.pojo.OrderMain;
import cn._51doit.live.udfs.JsonToOrderDetailFunction;
import cn._51doit.live.udfs.JsonToOrderMainFunction;
import cn._51doit.live.udfs.OrderLeftJoinFunction;
import cn._51doit.live.utils.FlinkUtils;
import cn._51doit.live.utils.FlinkUtilsV2;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.time.Duration;

/**
 * Flink streaming job that aggregates order data from the business database.
 *
 * <p>Reads the order-main and order-detail change streams from Kafka, converts the
 * JSON records to beans, assigns event-time watermarks, left-joins detail records
 * with their main record in 5-second tumbling event-time windows, recovers late
 * detail records via a side output, and enriches any un-joined detail record by
 * (TODO) looking up its main record in the database.
 */
public class OrderCount {

    public static void main(String[] args) throws Exception {

        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException
        // when the configuration file path is not supplied.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: OrderCount <config-properties-file>");
        }
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // For testing only: parallelism is forced to 1 so the output is easy to observe.
        // Do NOT set it to 1 in production.
        FlinkUtilsV2.env.setParallelism(1);
        DataStream<String> orderMainLines = FlinkUtilsV2.createKafkaStream(parameterTool, "tp-order-main", "g01", SimpleStringSchema.class);

        DataStream<String> orderDetailLines = FlinkUtilsV2.createKafkaStream(parameterTool, "tp-order-detail", "g01", SimpleStringSchema.class);

        // Convert the raw JSON strings into beans.
        SingleOutputStreamOperator<OrderMain> orderMainStream = orderMainLines.process(new JsonToOrderMainFunction());
        SingleOutputStreamOperator<OrderDetail> orderDetailStream = orderDetailLines.process(new JsonToOrderDetailFunction());

        // Extract the event time and generate watermarks (used later to divide
        // event-time tumbling windows). Out-of-orderness bound is 0 seconds.

        SingleOutputStreamOperator<OrderMain> orderMainWithWaterMark = orderMainStream.assignTimestampsAndWatermarks(WatermarkStrategy.<OrderMain>forBoundedOutOfOrderness(Duration.ofSeconds(0)).withTimestampAssigner(new SerializableTimestampAssigner<OrderMain>() {
            @Override
            public long extractTimestamp(OrderMain element, long recordTimestamp) {
                // Event time comes from the record's update_time column.
                return element.getUpdate_time().getTime();
            }
        }));


        SingleOutputStreamOperator<OrderDetail> orderDetailWithWaterMark = orderDetailStream.assignTimestampsAndWatermarks(WatermarkStrategy.<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(0)).withTimestampAssigner(new SerializableTimestampAssigner<OrderDetail>() {
            @Override
            public long extractTimestamp(OrderDetail element, long recordTimestamp) {
                return element.getUpdate_time().getTime();
            }
        }));

        // Tag used to capture order-detail records that arrive after their window closed.
        OutputTag<OrderDetail> lateDataTag = new OutputTag<OrderDetail>("lateData") {
        };

        // Define a window of the same type and length as the join window below.
        // If a record is late for this first window it would also be late for the
        // subsequent (join) window, so capturing lateness here is sufficient.
        WindowedStream<OrderDetail, Long, TimeWindow> orderDetailWithLateData = orderDetailWithWaterMark.keyBy(OrderDetail::getOrder_id)
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .sideOutputLateData(lateDataTag);

        // Materialize the window (pass-through) so the late-data side output can be
        // read, then pair each late detail with a null OrderMain to be enriched later.
        DataStream<Tuple2<OrderDetail, OrderMain>> orderDetailLateData = orderDetailWithLateData.process(new ProcessWindowFunction<OrderDetail, OrderDetail, Long, TimeWindow>() {
            @Override
            public void process(Long aLong, Context context, Iterable<OrderDetail> elements, Collector<OrderDetail> out) throws Exception {
                for (OrderDetail element : elements) {
                    out.collect(element);
                }
            }
        }).getSideOutput(lateDataTag).map(new MapFunction<OrderDetail, Tuple2<OrderDetail, OrderMain>>() {
            @Override
            public Tuple2<OrderDetail, OrderMain> map(OrderDetail value) throws Exception {
                return Tuple2.of(value, null);
            }
        });


        // Left outer join with the detail table as the left side and the main table as
        // the right side: if a main record's stream is late we can still emit the left
        // (detail) record and look up the main record in the database afterwards.
        // If a detail record itself is late here, it would be lost — which is why the
        // late details captured above are re-united with the joined stream below.

        // coGroup the two streams (detail is the left side since it has more records).
        DataStream<Tuple2<OrderDetail, OrderMain>> joined = orderDetailWithWaterMark.coGroup(orderMainWithWaterMark)
                .where(OrderDetail::getOrder_id)
                .equalTo(OrderMain::getOid)
                // NOTE: after coGroup/window, sideOutputLateData cannot be called,
                // hence the separate late-data window above.
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .apply(new OrderLeftJoinFunction());

        // After the left outer join some records may not have matched:
        // orderDetail (f0) is non-null while orderMain (f1) is null.

        DataStream<Tuple2<OrderDetail, OrderMain>> union = joined.union(orderDetailLateData);

        // Enrich un-matched records by fetching the missing OrderMain from the database.
        SingleOutputStreamOperator<Tuple2<OrderDetail, OrderMain>> res = union.map(new RichMapFunction<Tuple2<OrderDetail, OrderMain>, Tuple2<OrderDetail, OrderMain>>() {

            @Override
            public void open(Configuration parameters) throws Exception {
                // Open the database connection (one per parallel subtask).

            }

            @Override
            public Tuple2<OrderDetail, OrderMain> map(Tuple2<OrderDetail, OrderMain> tp) throws Exception {
                if (tp.f1 == null) {
                    Long oid = tp.f0.getOrder_id();
                    //TODO query the order-main table by its id to fetch the main-order fields

                    //tp.f1 = the OrderMain fetched from the database
                }
                return tp;
            }

            @Override
            public void close() throws Exception {
                // Close the database connection.
            }
        });

        // Write the joined wide table to ClickHouse for multi-dimensional queries.
        res.print();


        FlinkUtilsV2.env.execute();


    }
}
