package com.atbeijing.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.atbeijing.gmall.realtime.bean.order.OrderWide;
import com.atbeijing.gmall.realtime.bean.payment.PaymentInfo;
import com.atbeijing.gmall.realtime.bean.payment.PaymentWide;
import com.atbeijing.gmall.realtime.utils.DateTimeUtil;
import com.atbeijing.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

/**
 * 支付宽表
 */
/**
 * Payment wide-table job (DWM layer).
 *
 * <p>Reads the payment-info stream ({@code dwd_payment_info}) and the order wide
 * stream ({@code dwm_order_wide}) from Kafka, interval-joins them on
 * {@code order_id} (a payment must occur within 30 minutes of order creation),
 * and writes the resulting {@link PaymentWide} records as JSON to the
 * {@code dwm_payment_wide} topic.
 */
public class PaymentWideApp {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // Checkpointing — currently disabled; re-enable for production fault tolerance.
        // NOTE(review): the commented timeout (3000 ms) is shorter than the commented
        // interval (5000 ms) — verify before re-enabling.
//        env.enableCheckpointing(5000L,CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(3000);
//        // keep the externalized checkpoint when the job is cancelled
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,5000));
//        env.setStateBackend(new FsStateBackend("hdfs://hadoop202:8020/gmall/ck"));
//        System.setProperty("HADOOP_USER_NAME","wjg");

        // Kafka topics and consumer group.
        String groupId = "payment_wide_group";
        String paymentInfoSourceTopic = "dwd_payment_info";
        String orderWideSourceTopic = "dwm_order_wide";
        String paymentWideSinkTopic = "dwm_payment_wide";

        DataStreamSource<String> orderWideSource =
                env.addSource(MyKafkaUtil.getKafkaSource(orderWideSourceTopic, groupId));
        DataStreamSource<String> paymentInfoSource =
                env.addSource(MyKafkaUtil.getKafkaSource(paymentInfoSourceTopic, groupId));

        // Order wide stream: parse JSON, event time = order create_time, key by order_id.
        KeyedStream<OrderWide, Long> orderWideKeyedStream = orderWideSource
                .map(r -> JSON.parseObject(r, OrderWide.class))
                .assignTimestampsAndWatermarks(monotonousStrategy(
                        (element, recordTimestamp) -> DateTimeUtil.toTs(element.getCreate_time())))
                .keyBy(OrderWide::getOrder_id);

        // Payment stream: parse JSON, event time = payment callback_time, key by order_id.
        KeyedStream<PaymentInfo, Long> paymentInfoKeyedStream = paymentInfoSource
                .map(r -> JSON.parseObject(r, PaymentInfo.class))
                .assignTimestampsAndWatermarks(monotonousStrategy(
                        (element, recordTimestamp) -> DateTimeUtil.toTs(element.getCallback_time())))
                .keyBy(PaymentInfo::getOrder_id);

        // One-to-many: interval-join the "one" side (payment) against the "many" side
        // (order wide). An order is created first and paid within 30 minutes, so for
        // each payment the matching order event lies in [payment - 30 min, payment].
        SingleOutputStreamOperator<String> paymentWideDS = paymentInfoKeyedStream
                // records with the same key from both streams are processed together
                .intervalJoin(orderWideKeyedStream)
                .between(Time.minutes(-30), Time.seconds(0))
                .process(new ProcessJoinFunction<PaymentInfo, OrderWide, PaymentWide>() {
                    @Override
                    public void processElement(PaymentInfo paymentInfo, OrderWide orderWide,
                                               Context ctx, Collector<PaymentWide> out) throws Exception {
                        out.collect(new PaymentWide(paymentInfo, orderWide));
                    }
                })
                .map(JSON::toJSONString);

        paymentWideDS.print();

        paymentWideDS.addSink(MyKafkaUtil.getKafkaSink(paymentWideSinkTopic));

        env.execute("PaymentWideApp");
    }

    /**
     * Builds a monotonous-timestamps watermark strategy with the given event-time
     * extractor. Both source streams here carry per-key monotonically increasing
     * event times, so no out-of-orderness bound is applied.
     *
     * @param assigner extracts the event-time timestamp (epoch millis) from a record
     * @param <T>      the stream element type
     * @return a watermark strategy for in-order streams using {@code assigner}
     */
    private static <T> WatermarkStrategy<T> monotonousStrategy(SerializableTimestampAssigner<T> assigner) {
        return WatermarkStrategy.<T>forMonotonousTimestamps().withTimestampAssigner(assigner);
    }
}
