package com.camemax.controller;

import com.camemax.pojo.DealCompareEvent;
import com.camemax.pojo.OrderPayEvent;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;
import scala.Tuple2;

/*
 * Merges two data sources via Flink joins. Two approaches:
 *   1. Window join: DataStream.join(other).where(...) — pairs elements that share a key
 *      and fall into the same window.
 *   2. Interval join (used below): streamA.keyBy(...).intervalJoin(streamB.keyBy(...))
 *      .between(lower, upper) — for each element of A, fetches elements of B with the
 *      same key whose event time lies within [lower, upper] relative to A's timestamp.
 * */
public class OrderCompareByJoin {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read both logs from the classpath. Resolve against this class (rather than the
        // sibling OrderCompare class) so the lookup does not depend on another file.
        DataStream<String> orderEventStream =
                env.readTextFile(OrderCompareByJoin.class.getResource("/OrderLog.csv").getPath());
        DataStream<String> payEventStream =
                env.readTextFile(OrderCompareByJoin.class.getResource("/ReceiptLog.csv").getPath());

        DataStream<OrderPayEvent> payStream = parseOrderEvents(orderEventStream);
        DataStream<DealCompareEvent> thirdPartyStream = parseReceiptEvents(payEventStream);

        // Event-time interval join: for each order event, emit a pair with every
        // third-party event that has the same orderToken and whose timestamp lies
        // within [-3s, +5s] of the order event's timestamp.
        payStream.keyBy(OrderPayEvent::getOrderToken)
                .intervalJoin(thirdPartyStream.keyBy(DealCompareEvent::getOrderToken))
                .between(Time.seconds(-3), Time.seconds(5))
                .process(new ProcessJoinFunction<OrderPayEvent, DealCompareEvent, Tuple2<OrderPayEvent, DealCompareEvent>>() {
                    @Override
                    public void processElement(OrderPayEvent left, DealCompareEvent right, Context ctx,
                                               Collector<Tuple2<OrderPayEvent, DealCompareEvent>> out) {
                        out.collect(new Tuple2<>(left, right));
                    }
                })
                .print();

        env.execute("OrderCompareByJoin");
    }

    /**
     * Parses OrderLog.csv lines into {@link OrderPayEvent}s, assigns event-time
     * timestamps from {@code getOrderTime()} (seconds, converted to epoch millis),
     * and drops records whose order token is empty.
     *
     * <p>Columns are consumed as (long, string, string, long) per the constructor;
     * presumably (orderId, eventType, orderToken, orderTime) — TODO confirm against
     * the POJO definition.
     *
     * <p>Watermarks: the original used {@code noWatermarks()}, under which event time
     * never advances while the job runs, so the interval join's keyed buffers are only
     * released when the bounded input ends. {@code forMonotonousTimestamps()} yields the
     * same join results but allows continuous state cleanup. NOTE(review): this assumes
     * the log is ordered by timestamp — if it is out of order, use
     * {@code forBoundedOutOfOrderness} instead, or late records will be dropped.
     *
     * @param lines raw CSV lines from OrderLog.csv
     * @return timestamped stream of order events with a non-empty order token
     */
    private static DataStream<OrderPayEvent> parseOrderEvents(DataStream<String> lines) {
        return lines.map((MapFunction<String, OrderPayEvent>) data -> {
                    String[] fields = data.split(",");
                    return new OrderPayEvent(
                            Long.parseLong(fields[0]),
                            fields[1],
                            fields[2],
                            Long.parseLong(fields[3]));
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy.<OrderPayEvent>forMonotonousTimestamps()
                        .withTimestampAssigner((SerializableTimestampAssigner<OrderPayEvent>)
                                (element, recordTimestamp) -> element.getOrderTime() * 1000L))
                .filter(data -> !"".equals(data.getOrderToken()));
    }

    /**
     * Parses ReceiptLog.csv lines into {@link DealCompareEvent}s and assigns event-time
     * timestamps from {@code getDealTimestamp()} (seconds, converted to epoch millis).
     *
     * <p>Columns are consumed as (string, string, long) per the constructor; presumably
     * (orderToken, payChannel, dealTimestamp) — TODO confirm against the POJO definition.
     * Same watermarking rationale as {@link #parseOrderEvents}.
     *
     * @param lines raw CSV lines from ReceiptLog.csv
     * @return timestamped stream of third-party payment events
     */
    private static DataStream<DealCompareEvent> parseReceiptEvents(DataStream<String> lines) {
        return lines.map((MapFunction<String, DealCompareEvent>) data -> {
                    String[] fields = data.split(",");
                    return new DealCompareEvent(
                            fields[0],
                            fields[1],
                            Long.parseLong(fields[2]));
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy.<DealCompareEvent>forMonotonousTimestamps()
                        .withTimestampAssigner((SerializableTimestampAssigner<DealCompareEvent>)
                                (element, recordTimestamp) -> element.getDealTimestamp() * 1000L));
    }
}
