package net.bwie.realtime.jtp.dwd.trade.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import net.bwie.realtime.jtp.common.utils.DateTimeUtil;
import net.bwie.realtime.jtp.common.utils.DorisUtil;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.dwd.trade.function.LoadProvinceDimMapFunction;
import net.bwie.realtime.jtp.dwd.trade.function.RequestDicDimAsyncFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * DWD-layer Flink job: builds an order-detail wide table and writes it to Doris.
 *
 * Pipeline:
 *   1. Consume raw change events (JSON) from Kafka topic {@code jtp_mall_topic}.
 *   2. Interval-join order_info with order_detail, enrich with province and
 *      dictionary dimensions (see {@link #handle(DataStream)}).
 *   3. Sink the resulting wide-table rows to Doris table {@code dwd_order_detail},
 *      which backs real-time aggregation via materialized views.
 */
public class JtpOrderDetailDorisDwdJob {

    public static void main(String[] args) throws Exception {
        // 1. Execution environment. Parallelism is pinned to 1; raise it once
        //    keyed-state scaling has been validated for this job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Source: raw CDC-style JSON events from Kafka.
        DataStream<String> dataStream = KafkaUtil.consumerKafka(env, "jtp_mall_topic");

        // 3. Transformation: join + dimension enrichment into the DWD wide table.
        DataStream<String> dwdStream = handle(dataStream);

        // 4. Sink: write wide-table rows to Doris.
        DorisUtil.saveToDoris(dwdStream, "jtp_mall_doris", "dwd_order_detail");

        // 5. Trigger execution.
        env.execute("JtpOrderDetailDorisDwdJob");
    }

    /**
     * Joins the order streams and attaches dimension fields, producing the
     * order-detail wide table written to Doris.
     *
     * Steps:
     *   1. Stream join: interval join of order_detail with order_info
     *      (large stream joined with large stream).
     *   2. Province dimension: pre-loaded lookup ({@link LoadProvinceDimMapFunction}).
     *   3. Dictionary dimension: async I/O lookup ({@link RequestDicDimAsyncFunction}).
     *
     * @param stream raw Kafka event stream (JSON strings)
     * @return enriched wide-table rows as JSON strings
     */
    private static DataStream<String> handle(DataStream<String> stream) {
        // Step 1: interval join of the two order streams.
        DataStream<String> joinStream = joinProcess(stream);

        // Step 2: province dimension via a pre-loaded in-memory map.
        DataStream<String> provinceStream = joinStream.map(new LoadProvinceDimMapFunction());

        // Step 3: dictionary dimension via async I/O. Ordered wait preserves
        // record order; 10s timeout, at most 100 in-flight requests.
        DataStream<String> dwdStream = AsyncDataStream.orderedWait(
                provinceStream,
                new RequestDicDimAsyncFunction(),
                10000,
                TimeUnit.MILLISECONDS,
                100
        );

        return dwdStream;
    }

    /**
     * Keeps only {@code insert} events for the given source table.
     *
     * A plain lambda replaces the previous anonymous {@code RichFilterFunction}:
     * no rich lifecycle (open/close) or runtime context was used.
     *
     * @param stream    raw event stream (JSON strings)
     * @param tableName source table to keep, e.g. {@code "order_detail"}
     * @return filtered stream containing only inserts for {@code tableName}
     */
    private static SingleOutputStreamOperator<String> filterInserts(DataStream<String> stream,
                                                                    String tableName) {
        return stream.filter(value -> {
            JSONObject jsonObject = JSON.parseObject(value);
            return tableName.equals(jsonObject.getString("table_name"))
                    && "insert".equals(jsonObject.getString("operate_type"));
        });
    }

    /**
     * Watermark strategy shared by both order streams: zero allowed
     * out-of-orderness, event time taken from {@code operate_data.create_time}
     * ({@code yyyy-MM-dd HH:mm:ss}).
     *
     * The cast disambiguates the {@code withTimestampAssigner} overloads when a
     * lambda is used.
     */
    private static WatermarkStrategy<String> createTimeWatermarks() {
        return WatermarkStrategy
                .<String>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                .withTimestampAssigner((SerializableTimestampAssigner<String>) (element, recordTimestamp) -> {
                    String eventTime = JSON.parseObject(element)
                            .getJSONObject("operate_data").getString("create_time");
                    return DateTimeUtil.convertStringToLong(eventTime, "yyyy-MM-dd HH:mm:ss");
                });
    }

    /**
     * Interval-joins order_info with order_detail and emits the detail record
     * widened with {@code user_id}, {@code province_id} and {@code cur_date}.
     *
     * Because an order_info row arrives before its order_detail rows, an
     * interval join with window [-1s, +2s] around the info record is used.
     * Interval join requires both inputs keyed by the join key:
     * info.id == detail.order_id.
     *
     * @param stream raw event stream (JSON strings)
     * @return joined wide-table rows as JSON strings
     */
    private static DataStream<String> joinProcess(DataStream<String> stream) {
        // Step 1: split into the two per-table insert streams.
        SingleOutputStreamOperator<String> detailStream = filterInserts(stream, "order_detail");
        SingleOutputStreamOperator<String> infoStream = filterInserts(stream, "order_info");

        // Step 2: assign event time + watermarks (same strategy on both sides).
        SingleOutputStreamOperator<String> timeInfoStream =
                infoStream.assignTimestampsAndWatermarks(createTimeWatermarks());
        SingleOutputStreamOperator<String> timeDetailStream =
                detailStream.assignTimestampsAndWatermarks(createTimeWatermarks());

        // Step 3: interval join on info.id == detail.order_id.
        SingleOutputStreamOperator<String> joinStream = timeInfoStream
                .keyBy(
                        element -> JSON.parseObject(element).getJSONObject("operate_data").getString("id")
                )
                .intervalJoin(
                        timeDetailStream.keyBy(
                                element -> JSON.parseObject(element).getJSONObject("operate_data").getString("order_id")
                        )
                )
                .between(Time.seconds(-1), Time.seconds(2))
                .process(new ProcessJoinFunction<String, String, String>() {
                    @Override
                    public void processElement(String info, String detail,
                                               Context ctx, Collector<String> out) throws Exception {
                        // Pull user_id and province_id from the order_info side.
                        JSONObject infoJsonObject = JSON.parseObject(info).getJSONObject("operate_data");
                        Integer userId = infoJsonObject.getInteger("user_id");
                        Integer provinceId = infoJsonObject.getInteger("province_id");

                        // Widen the detail record with the info-side fields.
                        JSONObject detailJsonObject = JSON.parseObject(detail).getJSONObject("operate_data");
                        detailJsonObject.put("user_id", userId);
                        detailJsonObject.put("province_id", provinceId);

                        // Order date = first 10 chars of create_time ("yyyy-MM-dd").
                        // Guard against missing/short values so a single bad
                        // record cannot kill the job with an NPE/IndexOutOfBounds.
                        String createTime = detailJsonObject.getString("create_time");
                        if (createTime != null && createTime.length() >= 10) {
                            detailJsonObject.put("cur_date", createTime.substring(0, 10));
                        }

                        out.collect(detailJsonObject.toJSONString());
                    }
                });

        // Return the joined wide-table stream.
        return joinStream;
    }
}