package com.project.capture7.app;

import com.project.capture5.bean.OrderLog;
import com.project.capture5.bean.ReceiptLog;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

/**
 * Real-time order/receipt reconciliation monitor, tolerating out-of-order and late data.
 *
 * <p>Joins the order stream against the payment-receipt stream on transaction id using an
 * event-time interval join (±15 seconds), and reports each successfully matched transaction.
 *
 * @author Shelly An
 * @create 2020/9/19 10:21
 */
public class OrderTxDetectWithIntervalJoin {

    /** Default input paths, used when no command-line arguments are supplied. */
    private static final String DEFAULT_ORDER_PATH = "Data/OrderLog.csv";
    private static final String DEFAULT_RECEIPT_PATH = "Data/ReceiptLog.csv";

    public static void main(String[] args) throws Exception {
        // Input paths may be overridden on the command line; defaults preserve old behavior.
        String orderPath = args.length > 0 ? args[0] : DEFAULT_ORDER_PATH;
        String receiptPath = args.length > 1 ? args[1] : DEFAULT_RECEIPT_PATH;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        SingleOutputStreamOperator<OrderLog> orderLogDS = readOrders(env, orderPath);
        SingleOutputStreamOperator<ReceiptLog> txDS = readReceipts(env, receiptPath);

        // 1. Key both streams by transaction id so the interval join only pairs matching records.
        KeyedStream<OrderLog, String> orderKS = orderLogDS.keyBy(OrderLog::getTxId);
        KeyedStream<ReceiptLog, String> txKS = txDS.keyBy(ReceiptLog::getTxId);

        // 2. Interval join: pair order and receipt events whose event times lie within 15s of each other.
        SingleOutputStreamOperator<String> resultDS = orderKS.intervalJoin(txKS)
                .between(Time.seconds(-15), Time.seconds(15))
                .process(new ProcessJoinFunction<OrderLog, ReceiptLog, String>() {
                    @Override
                    public void processElement(OrderLog left,
                                               ReceiptLog right,
                                               Context ctx,
                                               Collector<String> out) throws Exception {
                        // Both inputs are keyed by txId, so a keyed interval join only ever pairs
                        // elements with equal keys — the former explicit equality re-check was
                        // always true and has been removed.
                        out.collect("订单" + left.getTxId() + "对账成功");
                    }
                });

        resultDS.print();
        env.execute();
    }

    /**
     * Reads and parses the order CSV, assigning event-time timestamps with a 3-second
     * bounded-out-of-orderness watermark (the order file may contain out-of-order records).
     *
     * @param env  the stream execution environment
     * @param path path to the order CSV file
     * @return the parsed order stream with timestamps/watermarks assigned
     */
    private static SingleOutputStreamOperator<OrderLog> readOrders(StreamExecutionEnvironment env,
                                                                   String path) {
        return env.readTextFile(path)
                .map(new MapFunction<String, OrderLog>() {
                    @Override
                    public OrderLog map(String value) throws Exception {
                        // Columns feed OrderLog(Long, String, String, Long); exact field meanings
                        // live in the OrderLog bean — presumably id, eventType, txId, eventTime.
                        String[] datas = value.split(",");
                        return new OrderLog(Long.valueOf(datas[0]),
                                datas[1],
                                datas[2],
                                Long.valueOf(datas[3])
                        );
                    }
                })
                .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<OrderLog>(Time.seconds(3)) {
                    @Override
                    public long extractTimestamp(OrderLog element) {
                        return element.getEventTime() * 1000L; // seconds -> milliseconds
                    }
                });
    }

    /**
     * Reads and parses the receipt CSV; its timestamps are treated as strictly ascending
     * (in-order), so a simple ascending timestamp extractor suffices.
     *
     * @param env  the stream execution environment
     * @param path path to the receipt CSV file
     * @return the parsed receipt stream with timestamps/watermarks assigned
     */
    private static SingleOutputStreamOperator<ReceiptLog> readReceipts(StreamExecutionEnvironment env,
                                                                       String path) {
        return env.readTextFile(path)
                .map(new MapFunction<String, ReceiptLog>() {
                    @Override
                    public ReceiptLog map(String value) throws Exception {
                        // Columns feed ReceiptLog(String, String, Long); exact field meanings
                        // live in the ReceiptLog bean — presumably txId, payChannel, eventTime.
                        String[] datas = value.split(",");
                        return new ReceiptLog(datas[0], datas[1], Long.valueOf(datas[2]));
                    }
                })
                .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<ReceiptLog>() {
                    @Override
                    public long extractAscendingTimestamp(ReceiptLog element) {
                        return element.getEventTime() * 1000L; // seconds -> milliseconds
                    }
                });
    }
}
