package com.zyh.flink.day02.transform;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.api.java.tuple.Tuple2;

import java.util.HashMap;
import java.util.Map;

/**
 * Demo of joining two socket streams with {@code connect} + {@code keyBy}.
 *
 * <p>Stream 1 (port 9991) carries category records: {@code "<categoryId> <categoryName>"}.
 * Stream 2 (port 9992) carries product records: {@code "<productId> <productName> <categoryId>"}.
 * Products are enriched with their category name and printed as
 * {@code (productId, productName, categoryName)}.
 */
public class ConnectTest {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism is left at the cluster default:
        //env.setParallelism(1);

        // Category stream: split each line on whitespace into (categoryId, categoryName).
        SingleOutputStreamOperator<Tuple2<String, String>> categoryStream =
                env.socketTextStream("hadoop10", 9991)
                        .map(new MapFunction<String, Tuple2<String, String>>() {
                            @Override
                            public Tuple2<String, String> map(String line) throws Exception {
                                String[] fields = line.split("\\s+");
                                return Tuple2.of(fields[0], fields[1]);
                            }
                        });

        // Product stream: split each line into (productId, productName, categoryId).
        SingleOutputStreamOperator<Tuple3<String, String, String>> productStream =
                env.socketTextStream("hadoop10", 9992)
                        .map(new MapFunction<String, Tuple3<String, String, String>>() {
                            @Override
                            public Tuple3<String, String, String> map(String line) throws Exception {
                                String[] fields = line.split("\\s+");
                                return Tuple3.of(fields[0], fields[1], fields[2]);
                            }
                        });

        // Key both streams by the category id so matching records land on the same subtask,
        // then join them with a shared lookup map inside a CoFlatMapFunction.
        SingleOutputStreamOperator<Tuple3<String, String, String>> enrichedStream =
                categoryStream.connect(productStream)
                        .keyBy(category -> category.f0, product -> product.f2)
                        .flatMap(new CoFlatMapFunction<Tuple2<String, String>, Tuple3<String, String, String>, Tuple3<String, String, String>>() {
                            // NOTE(review): plain per-subtask HashMap, not Flink managed keyed
                            // state — contents are lost on failure/restart or rescaling.
                            // Fine for a demo; consider MapState for production.
                            private final Map<String, String> categoryNameById = new HashMap<>();

                            @Override
                            public void flatMap1(Tuple2<String, String> category,
                                                 Collector<Tuple3<String, String, String>> collector) throws Exception {
                                // Remember the category name; emits nothing by itself.
                                categoryNameById.put(category.f0, category.f1);
                            }

                            @Override
                            public void flatMap2(Tuple3<String, String, String> product,
                                                 Collector<Tuple3<String, String, String>> collector) throws Exception {
                                // Emit only products whose category has already arrived;
                                // earlier products for a not-yet-seen category are dropped.
                                String categoryName = categoryNameById.get(product.f2);
                                if (categoryName != null) {
                                    collector.collect(Tuple3.of(product.f0, product.f1, categoryName));
                                }
                            }
                        });

        enrichedStream.print();

        env.execute("ConnectJob");
    }
}
