package net.bw.realtime.jtp.dwd.trade.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import net.bw.realtime.jtp.common.utils.DateTimeUtil;
import net.bw.realtime.jtp.common.utils.DorisUtil;
import net.bw.realtime.jtp.common.utils.KafkaUtil;
import net.bw.realtime.jtp.dwd.trade.function.LoadProvinceDimMapFunction;
import net.bw.realtime.jtp.dwd.trade.function.RequestDicDimAsyncFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

/*
 * @ Author：liuyawei
 * @ Date：2025-06-04
 */
/*
 * Flink streaming job that builds the DWD-layer order-detail wide table.
 *
 * Pipeline: Kafka (topic-mall) CDC events
 *           -> interval join of order_info with order_detail
 *           -> province dimension enrichment (map)
 *           -> dictionary dimension enrichment (async I/O)
 *           -> Doris sink (jtp_mall_doris.dwd_order_detail).
 *
 * @ Author：liuyawei
 * @ Date：2025-06-04
 */
public class JtpOrderDetailDorisDwdJob {

    /** Timestamp pattern of the CDC "create_time" field used for event time. */
    private static final String CREATE_TIME_PATTERN = "yyyy-MM-dd HH:mm:ss";

    public static void main(String[] args) throws Exception {

        // 1. Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism: keeps the per-key event order simple for this job.
        env.setParallelism(1);

        // 2. Read the raw CDC records from Kafka.
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "topic-mall");

        // 3. Transform: join + dimension enrichment.
        DataStream<String> handledStream = handle(kafkaStream);

        // 4. Persist the wide-table records to Doris.
        DorisUtil.saveToDoris(handledStream, "jtp_mall_doris", "dwd_order_detail");

        // 5. Launch the job.
        env.execute("JtpOrderDetailDorisDwdJob");
    }

    /**
     * Core transformation chain.
     *
     * @param kafkaStream raw CDC JSON records from Kafka
     * @return DWD wide-table records, ready for the Doris sink
     */
    private static DataStream<String> handle(DataStream<String> kafkaStream) {

        // 1. Interval join: order_info join order_detail.
        DataStream<String> joinStream = intervalJoin(kafkaStream);

        // 2. Enrich with province dimension fields.
        DataStream<String> provinceStream = joinStream.map(new LoadProvinceDimMapFunction());

        // 3. Enrich dictionary dimension fields via async I/O.
        //    orderedWait preserves record order; 10s timeout, at most 100 in-flight requests.
        return AsyncDataStream.orderedWait(
                provinceStream,
                new RequestDicDimAsyncFunction(),
                10000,
                TimeUnit.MILLISECONDS,
                100
        );
    }

    /**
     * Keeps only "insert" CDC events that belong to the given table.
     *
     * @param stream    raw CDC JSON records
     * @param tableName value of "table_name" to keep (e.g. "order_info")
     * @return filtered stream containing only inserts of that table
     */
    private static SingleOutputStreamOperator<String> filterInsertsByTable(
            DataStream<String> stream, final String tableName) {
        return stream.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String value) throws Exception {
                // Parse the CDC envelope and match on table name + operation type.
                JSONObject jsonObject = JSON.parseObject(value);
                return tableName.equals(jsonObject.getString("table_name"))
                        && "insert".equals(jsonObject.getString("operator_type"));
            }
        });
    }

    /**
     * Assigns event time from operator_data.create_time with a zero-lateness
     * bounded-out-of-orderness watermark (events are assumed in order).
     *
     * @param stream filtered CDC stream whose payload carries "create_time"
     * @return the stream with timestamps and watermarks assigned
     */
    private static SingleOutputStreamOperator<String> withCreateTimeWatermarks(
            SingleOutputStreamOperator<String> stream) {
        return stream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                            @Override
                            public long extractTimestamp(String element, long recordTimestamp) {
                                // Event time = payload create_time, parsed to epoch millis.
                                JSONObject jsonObject = JSON.parseObject(element);
                                String ts = jsonObject.getJSONObject("operator_data").getString("create_time");
                                return DateTimeUtil.convertStringToLong(ts, CREATE_TIME_PATTERN);
                            }
                        })
        );
    }

    /**
     * Interval join: order_info join order_detail.
     *
     * Joins order headers with their detail rows on order id within a
     * [-1s, +2s] event-time window, then copies user_id / province_id from
     * the header onto each detail record and derives cur_date from create_time.
     *
     * @param kafkaStream raw CDC JSON records
     * @return joined order-detail wide records as JSON strings
     */
    private static DataStream<String> intervalJoin(DataStream<String> kafkaStream) {

        // 1. Extract order_info inserts.
        SingleOutputStreamOperator<String> infoStream = filterInsertsByTable(kafkaStream, "order_info");

        // 2. Extract order_detail inserts.
        SingleOutputStreamOperator<String> detailStream = filterInsertsByTable(kafkaStream, "order_detail");

        // 3. Assign event-time watermarks on both sides.
        SingleOutputStreamOperator<String> infoWaterMarkStream = withCreateTimeWatermarks(infoStream);
        SingleOutputStreamOperator<String> detailWaterMarkStream = withCreateTimeWatermarks(detailStream);

        // 4. Key both sides on the order id, then interval-join them.
        SingleOutputStreamOperator<String> processStream = infoWaterMarkStream
                .keyBy(x -> JSON.parseObject(x).getJSONObject("operator_data").getString("id"))
                .intervalJoin(
                        detailWaterMarkStream.keyBy(x -> JSON.parseObject(x).getJSONObject("operator_data").getString("order_id"))
                )
                .between(Time.seconds(-1), Time.seconds(2))
                .process(new ProcessJoinFunction<String, String, String>() {
                    @Override
                    public void processElement(String left, String right, Context ctx, Collector<String> out) throws Exception {

                        // Pull user_id and province_id from the order_info side.
                        JSONObject leftJson = JSON.parseObject(left).getJSONObject("operator_data");
                        Integer userId = leftJson.getInteger("user_id");
                        Integer provinceId = leftJson.getInteger("province_id");

                        // Copy those fields onto the order_detail payload.
                        JSONObject rightJson = JSON.parseObject(right).getJSONObject("operator_data");
                        rightJson.put("user_id", userId);
                        rightJson.put("province_id", provinceId);

                        // Derive the order date (yyyy-MM-dd) from create_time.
                        String createTime = rightJson.getString("create_time");
                        String curDate = createTime.substring(0, 10);
                        rightJson.put("cur_date", curDate);

                        // Emit the enriched detail record.
                        out.collect(rightJson.toJSONString());
                    }
                });

        // 5. Return the joined wide-table stream.
        return processStream;
    }


}
