package com.galeno.day09;

import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

/**
 * @author galeno
 * @Title:
 * @Description:
 * @date 2021/10/26 17:19
 *
 * Inner join of two streams. Requirements:
 * 1. Records from both streams must fall into the same not-yet-fired window.
 * 2. The join keys must match (same type, equal value).
 *
 * Use case — two streams:
 * Stream 1 (fact stream): (orderId, categoryId, amount), e.g.
 *   o001,c10,2000
 * Stream 2 (dimension stream): (categoryId, categoryName), e.g.
 *   c10,books
 *   c11,phones
 *
 * Expected result:
 *   c10,books,2000
 * i.e. the join output can then be keyed/aggregated per category.
 */
public class ProcessTimeTumbingWindowJoin {
    public static void main(String[] args) throws Exception {
        // Run a local environment with the Flink web UI pinned to a fixed port.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port",22222);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);

        // Fact stream: raw order lines in the form "orderId,categoryId,amount".
        DataStreamSource<String> orderLines = env.socketTextStream("192.168.77.3", 9999);
        // Dimension stream: raw category lines in the form "categoryId,categoryName".
        DataStreamSource<String> categoryLines = env.socketTextStream("192.168.77.3", 9998);

        // Parse each order line into (orderId, categoryId, amount).
        SingleOutputStreamOperator<Tuple3<String, String, Double>> orders =
                orderLines.map(new MapFunction<String, Tuple3<String, String, Double>>() {
                    @Override
                    public Tuple3<String, String, Double> map(String line) throws Exception {
                        String[] fields = line.split(",");
                        return Tuple3.of(fields[0], fields[1], Double.parseDouble(fields[2]));
                    }
                });

        // Parse each category line into (categoryId, categoryName).
        SingleOutputStreamOperator<Tuple2<String, String>> categories =
                categoryLines.map(new MapFunction<String, Tuple2<String, String>>() {
                    @Override
                    public Tuple2<String, String> map(String line) throws Exception {
                        String[] fields = line.split(",");
                        return Tuple2.of(fields[0], fields[1]);
                    }
                });

        // To join two distributed streams, records sharing a join key must be
        // shuffled over the network into the same subtask AND land in the same
        // not-yet-fired window: same time (window), same place (subtask).
        // Internally Flink wraps one stream as TaggedUnion<T1, null> and the other
        // as TaggedUnion<null, T2>, unions them (types now match), keys the union
        // by the join condition, and applies the window.
        categories.print();
        orders.print();

        // Join on the category id using a 30-second processing-time tumbling window.
        DataStream<Tuple4<String, String, Double, String>> joined = orders.join(categories)
                .where(t1 -> t1.f1)   // join key of the first (order) stream
                .equalTo(t2 -> t2.f0) // join key of the second (category) stream
                .window(TumblingProcessingTimeWindows.of(Time.seconds(30)))
                .apply(new JoinFunction<Tuple3<String, String, Double>, Tuple2<String, String>, Tuple4<String, String, Double, String>>() {
                    // Called only when the window fires and both streams contain the same key.
                    @Override
                    public Tuple4<String, String, Double, String> join(Tuple3<String, String, Double> order, Tuple2<String, String> category) throws Exception {
                        return Tuple4.of(order.f0, order.f1, order.f2, category.f1);
                    }
                });

        joined.print();
        env.execute();
    }
}
