package cn.kgc.gmall.app.dwm;

import cn.kgc.gmall.bean.OrderWide;
import cn.kgc.gmall.bean.PaymentInfo;
import cn.kgc.gmall.bean.PaymentWide;
import cn.kgc.gmall.utils.DateTimeUtil;
import cn.kgc.gmall.utils.MyKafkaUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;

/**
 * Payment wide table.
 *  Merges records from the payment stream with the order wide-table stream.
 */
public class PaymentWideApp {

    public static void main(String[] args) throws Exception {
        // --- Environment and checkpointing boilerplate ---
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // Exactly-once checkpoints every 5 seconds, timing out after one minute;
        // retain externalized checkpoints even when the job is cancelled.
        env.enableCheckpointing(5 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        CheckpointConfig checkpointConfig = env.getCheckpointConfig();
        checkpointConfig.setCheckpointTimeout(60000L);
        checkpointConfig.enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // Checkpoint state lives on HDFS; the job must act as this HDFS user.
        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/gmall/flink/checkpoint"));
        System.setProperty("HADOOP_USER_NAME", "atkgc");

        // --- Kafka topics and consumer group ---
        String orderWideTopic = "dwm_order_wide";      // order wide-table topic (DWM layer)
        String paymentInfoTopic = "dwd_payment_info";  // payment topic (DWD layer)
        String sinkTopic = "dwm_payment_wide";         // merged output topic
        String consumerGroup = "payment_wide_group";

        // --- Source streams from Kafka ---
        DataStreamSource<String> rawPaymentStream =
                env.addSource(MyKafkaUtils.getKafkaConsumerSource(paymentInfoTopic, consumerGroup));
        DataStreamSource<String> rawOrderWideStream =
                env.addSource(MyKafkaUtils.getKafkaConsumerSource(orderWideTopic, consumerGroup));

        // --- Deserialize JSON strings into POJOs ---
        SingleOutputStreamOperator<PaymentInfo> paymentStream =
                rawPaymentStream.map(json -> JSON.parseObject(json, PaymentInfo.class));
        SingleOutputStreamOperator<OrderWide> orderWideStream =
                rawOrderWideStream.map(json -> JSON.parseObject(json, OrderWide.class));

        // --- Event-time watermarks (3s bounded out-of-orderness on both sides) ---
        // Payment events are timestamped by their callback time.
        SingleOutputStreamOperator<PaymentInfo> paymentWithWm = paymentStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<PaymentInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<PaymentInfo>() {
                            @Override
                            public long extractTimestamp(PaymentInfo element, long recordTimestamp) {
                                return DateTimeUtil.toTs(element.getCallback_time());
                            }
                        }));
        // Order-wide events are timestamped by their creation time.
        SingleOutputStreamOperator<OrderWide> orderWideWithWm = orderWideStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<OrderWide>forBoundedOutOfOrderness(Duration.ofSeconds(3L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<OrderWide>() {
                            @Override
                            public long extractTimestamp(OrderWide element, long recordTimestamp) {
                                return DateTimeUtil.toTs(element.getCreate_time());
                            }
                        }));

        // --- Key both streams by order id before joining ---
        KeyedStream<PaymentInfo, Long> keyedPayments = paymentWithWm.keyBy(PaymentInfo::getOrder_id);
        KeyedStream<OrderWide, Long> keyedOrderWides = orderWideWithWm.keyBy(OrderWide::getOrder_id);

        // --- Interval join ---
        // A payment must never precede its order; the order may be at most
        // 1800 seconds (30 minutes) older than the payment, hence the
        // interval [payment - 1800s, payment + 0s] on the order side.
        SingleOutputStreamOperator<PaymentWide> joined = keyedPayments
                .intervalJoin(keyedOrderWides)
                .between(Time.seconds(-1800), Time.seconds(0))
                .process(new ProcessJoinFunction<PaymentInfo, OrderWide, PaymentWide>() {
                    @Override
                    public void processElement(PaymentInfo payment, OrderWide orderWide,
                                               ProcessJoinFunction<PaymentInfo, OrderWide, PaymentWide>.Context ctx,
                                               Collector<PaymentWide> out) throws Exception {
                        // Fold both records into a single wide-table record.
                        out.collect(new PaymentWide(payment, orderWide));
                    }
                });

        // --- Serialize back to JSON and sink to Kafka ---
        joined.map(JSON::toJSONString)
                .addSink(MyKafkaUtils.getKafkaProducerSink(sinkTopic));

        env.execute();
    }
}
