package com.atguigu.day07;

import com.atguigu.bean.OrderEvent;
import com.atguigu.bean.TxEvent;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

/**
 * Joins the payment (order) stream with the bank-receipt stream on the shared
 * transaction id, using a Flink interval join in processing time.
 *
 * <p>Input lines are CSV. Order lines: {@code orderId,eventType,txId,eventTime};
 * receipt lines: {@code txId,payChannel,eventTime}. Matched pairs within a
 * +/-10 second interval are emitted as {@code Tuple2<OrderEvent, TxEvent>}.
 */
public class Flink08_JoinFunction {

    public static void main(String[] args) throws Exception {

        // 1. Set up the execution environment. Parallelism 1 keeps the
        //    console output in a single, ordered stream for the demo.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Read the payment (order) stream.
//        DataStreamSource<String> orderStrDS = env.readTextFile("input/OrderLog.csv");
        DataStreamSource<String> orderStrDS = env.socketTextStream("hadoop102", 8888);

        // 3. Read the bank-receipt stream.
//        DataStreamSource<String> receiptStrDS = env.readTextFile("input/ReceiptLog.csv");
        DataStreamSource<String> receiptStrDS = env.socketTextStream("hadoop102", 9999);

        // 4. Map both streams to JavaBeans.
        //    BUG FIX: String.split(",") never yields null elements, so the
        //    original filter "getTxId() != null" was a no-op. A missing txId
        //    arrives as the empty string, so we must also drop empty txIds to
        //    keep only records that actually carry a transaction id.
        //    NOTE: for an event-time join, assign watermarks here with
        //    WatermarkStrategy.forMonotonousTimestamps() and a
        //    SerializableTimestampAssigner extracting eventTime * 1000L.
        SingleOutputStreamOperator<OrderEvent> orderEventDS = orderStrDS.map(line -> {
            String[] fields = line.split(",");
            return new OrderEvent(Long.parseLong(fields[0]),
                    fields[1],
                    fields[2],
                    Long.parseLong(fields[3]));
        }).filter(data -> data.getTxId() != null && !data.getTxId().isEmpty());

        SingleOutputStreamOperator<TxEvent> receiptDS = receiptStrDS.map(line -> {
            String[] fields = line.split(",");
            return new TxEvent(fields[0],
                    fields[1],
                    Long.parseLong(fields[2]));
        });

        // 5. Interval-join the two streams on txId (processing time).
        //    An order event matches any receipt event that arrives within
        //    10 seconds before or after it; bounds are inclusive by default.
        SingleOutputStreamOperator<Tuple2<OrderEvent, TxEvent>> result = orderEventDS.keyBy(OrderEvent::getTxId)
                .intervalJoin(receiptDS.keyBy(TxEvent::getTxId))
                .inProcessingTime()
                .between(Time.seconds(-10), Time.seconds(10))
                //.lowerBoundExclusive()
                //.upperBoundExclusive()
                .process(new ProcessJoinFunction<OrderEvent, TxEvent, Tuple2<OrderEvent, TxEvent>>() {
                    @Override
                    public void processElement(OrderEvent left, TxEvent right, Context ctx, Collector<Tuple2<OrderEvent, TxEvent>> out) throws Exception {
                        out.collect(new Tuple2<>(left, right));
                    }
                });

        // 6. Print the joined pairs.
        result.print();

        // 7. Submit and run the job (blocks until the job terminates).
        env.execute();

    }

}
