package com.raylu.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.raylu.realtime.bean.OrderWide;
import com.raylu.realtime.bean.PaymentInfo;
import com.raylu.realtime.bean.PaymentWide;
import com.raylu.realtime.utils.DateUtil;
import com.raylu.realtime.utils.KafkaSinkUtil;
import com.raylu.realtime.utils.KafkaSourceUtil;
import com.raylu.realtime.utils.PropertiesUtil;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Properties;

/**
 * Description: DWM-layer Flink job that interval-joins payment events with the
 * order-wide stream (both read from Kafka) into {@code PaymentWide} records and
 * writes them back to the DWM payment-wide Kafka topic.
 * <p>
 * Create by lucienoz on 2022/1/4.
 * Copyright © 2022 lucienoz. All rights reserved.
 */
public class PaymentWideApp {
    public static void main(String[] args) throws Exception {
        Properties load = PropertiesUtil.load("config.properties");
        //TODO 1. 准备运行环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
        env.setParallelism(4);
        //TODO 2. 设置检查点
//        env.enableCheckpointing(5000L);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.seconds(3L),Time.days(3L)));
//        env.setStateBackend(new FsStateBackend(load.getProperty("payment.wide.app.fsstatebackend.url")));
//        System.setProperty("HADOOP_USER_NAME", "raylu");
        //TODO 3. 配置读入流
        KeyedStream<PaymentInfo, String> paymentInfoKeyedStream = env
                .addSource(KafkaSourceUtil.getKafkaSource(load.getProperty("payment.wide.app.kafka.source-topic1"), load.getProperty("payment.wide.app.kafka.group-id")))
                .map(r -> {
                    PaymentInfo paymentInfo = JSON.parseObject(r, PaymentInfo.class);
                    Long epochMilli = DateUtil.getEpochMilli(paymentInfo.getCreate_time());
                    paymentInfo.setTs(epochMilli);
                    return paymentInfo;
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy.<PaymentInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<PaymentInfo>() {
                            @Override
                            public long extractTimestamp(PaymentInfo element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }))
                .keyBy(r -> r.getOrder_id());

        KeyedStream<OrderWide, String> orderWideKeyedStream = env
                .addSource(KafkaSourceUtil.getKafkaSource(load.getProperty("payment.wide.app.kafka.source-topic2"), load.getProperty("payment.wide.app.kafka.group-id")))
                .map(r -> JSON.parseObject(r, OrderWide.class))
                .assignTimestampsAndWatermarks(WatermarkStrategy.<OrderWide>forBoundedOutOfOrderness(Duration.ofSeconds(3L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<OrderWide>() {
                            @Override
                            public long extractTimestamp(OrderWide element, long recordTimestamp) {
                                return element.getTs();
                            }
                        })).keyBy(r -> r.getOrder_id());
        //TODO 4. 将PaymentInfo数据与OrderWide数据进行时间区间关联
        SingleOutputStreamOperator<PaymentWide> resultDS = paymentInfoKeyedStream.intervalJoin(orderWideKeyedStream)
                .between(Time.seconds(-1800L), Time.seconds(10L))
                .process(new ProcessJoinFunction<PaymentInfo, OrderWide, PaymentWide>() {
                    @Override
                    public void processElement(PaymentInfo left, OrderWide right, ProcessJoinFunction<PaymentInfo, OrderWide, PaymentWide>.Context ctx, Collector<PaymentWide> out) throws Exception {
                        PaymentWide paymentWide = new PaymentWide();
                        BeanUtils.copyProperties(paymentWide, right);
                        BeanUtils.copyProperties(paymentWide, left);
                        out.collect(paymentWide);
                    }
                });

        //TODO 5. 将关联数据放入DWM_PAYMENT_WIDE主题中
        resultDS
                .map(JSON::toJSONString)
                .addSink(KafkaSinkUtil.getKafkaSink(load.getProperty("payment.wide.app.kafka.sink-topic")));
        resultDS.print();

        env.execute();
    }
}
