package cn._51doit.flink.day03;

import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

import java.util.HashMap;

public class ConnectDemo1 {

    /**
     * Demonstrates joining two {@code DataStream}s of different element types with
     * {@code connect}, then unifying them into a single String stream through a
     * {@link CoMapFunction} (one map method per input stream, with the option of
     * sharing state between them).
     *
     * <p>Usage: {@code ConnectDemo1 [host [port1 [port2]]]} — defaults to
     * {@code localhost 8888 9999}, matching the original hard-coded values.
     *
     * @param args optional overrides: socket host, port of the text stream, port of the number stream
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        // Generalized: host/ports may come from the command line; defaults keep
        // the original behavior when no args are given.
        String host = args.length > 0 ? args[0] : "localhost";
        int textPort = args.length > 1 ? Integer.parseInt(args[1]) : 8888;
        int numberPort = args.length > 2 ? Integer.parseInt(args[2]) : 9999;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // First stream: free-form text lines, e.g. "spark"
        DataStreamSource<String> lines1 = env.socketTextStream(host, textPort);

        SingleOutputStreamOperator<String> upper1 = lines1.map(String::toUpperCase).setParallelism(2);

        // Second stream: numeric text lines, e.g. "1", "4"
        DataStreamSource<String> lines2 = env.socketTextStream(host, numberPort);

        SingleOutputStreamOperator<Integer> nums = lines2.map(Integer::parseInt);

        // Connect the two streams: each keeps its own element type, but both feed
        // the single co-operator below, which may share state across them.
        ConnectedStreams<String, Integer> connectedStreams = upper1.connect(nums);

        SingleOutputStreamOperator<String> res = connectedStreams.map(new CoMapFunction<String, Integer, String>() {

            // Shared state (variables, intermediate results) visible to both
            // map1 and map2 would live here.
            //private HashMap<String, Integer> state;

            // Handles elements from the first (String) input stream.
            @Override
            public String map1(String value) throws Exception {

                return value;
            }

            // Handles elements from the second (Integer) input stream,
            // converting each number to its String form for the unified output.
            @Override
            public String map2(Integer value) throws Exception {
                return value.toString();
            }
        });

        res.print();

        env.execute();

    }
}
