package cn._51doit.flink.day09;

import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.time.Duration;

/**
 * 将两个数据流，进行join
 *
 * 如果让两个流能够join上，必须满足以下两个条件
 * 1.由于数据是分散在多台机器上，必须将join条件相同的数据通过网络传输到同一台机器的同一个分区中（按照条件进行KeyBy）
 * 2.让每个流中的数据都放慢脚步，等一等对方（划分相同类型、长度一样的窗口）
 *
 *
 */
public class EventTimeTumblingWindowJoin {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Sample input for stream 1 (eventTime,key,amount):
        //   1000,c1,300
        //   4999,c1,300
        //   5000,c2,200
        DataStreamSource<String> lines1 = env.socketTextStream("localhost", 8888);
        // Sample input for stream 2 (eventTime,key,category):
        //   1200,c1,图书
        //   5001,c2,家具
        DataStreamSource<String> lines2 = env.socketTextStream("localhost", 9999);

        // Both streams share the same "eventTime,key,value" format, so the
        // watermark assignment and parsing logic is factored into one helper
        // instead of being duplicated per stream.
        SingleOutputStreamOperator<Tuple3<Long, String, String>> tpStream1WithWaterMark = parseWithEventTime(lines1);
        SingleOutputStreamOperator<Tuple3<Long, String, String>> tpStream2WithWaterMark = parseWithEventTime(lines2);

        // https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/datastream/operators/joining/
        // Event-time tumbling-window join, window length 5 seconds.
        // Expected output for the samples above (t1.time, key, amount, t2.time, category):
        //   1000,c1,300,1200,图书
        DataStream<Tuple5<Long, String, String, Long, String>> res = tpStream1WithWaterMark.join(tpStream2WithWaterMark)
                .where(t1 -> t1.f1)   // join key of the left stream: the second field
                .equalTo(t2 -> t2.f1) // join key of the right stream: the second field
                .window(TumblingEventTimeWindows.of(Time.seconds(5)))
                .apply(new JoinFunction<Tuple3<Long, String, String>, Tuple3<Long, String, String>, Tuple5<Long, String, String, Long, String>>() {
                    // Invoked when the window fires, once per pair of records that
                    // share the same key AND fall into the same window.
                    @Override
                    public Tuple5<Long, String, String, Long, String> join(Tuple3<Long, String, String> first, Tuple3<Long, String, String> second) throws Exception {
                        return Tuple5.of(first.f0, first.f1, first.f2, second.f0, second.f2);
                    }
                });

        res.print();

        env.execute();
    }

    /**
     * Assigns event-time timestamps/watermarks (zero out-of-orderness) using the
     * first CSV field as the timestamp, then parses each line into
     * (eventTime, key, value).
     *
     * @param lines raw "eventTime,key,value" text stream
     * @return parsed stream carrying event-time watermarks
     */
    private static SingleOutputStreamOperator<Tuple3<Long, String, String>> parseWithEventTime(DataStream<String> lines) {
        return lines
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ZERO)
                                .withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                                    @Override
                                    public long extractTimestamp(String element, long recordTimestamp) {
                                        // First CSV field is the event-time in milliseconds.
                                        return Long.parseLong(element.split(",")[0]);
                                    }
                                }))
                .map(new MapFunction<String, Tuple3<Long, String, String>>() {
                    @Override
                    public Tuple3<Long, String, String> map(String input) throws Exception {
                        String[] fields = input.split(",");
                        return Tuple3.of(Long.parseLong(fields[0]), fields[1], fields[2]);
                    }
                });
    }
}
