package com.atguigu.flink.chapter05.transform;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.exc.StreamReadException;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

/*
 * Connect:
 *     Joins two streams into a single ConnectedStreams; the two input streams
 *     may have different element types.
 *
 *     1. Exactly two streams can be connected at a time.
 *     2. The element types of the two streams may differ.
 */
public class ConnectDemo {
    /**
     * Demo entry point: connects an Integer stream with a String stream and
     * maps both sides to a common String output via a {@link CoMapFunction}.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails — propagated instead of being
     *                   swallowed by printStackTrace() so the JVM exits non-zero
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Pin the REST/Web UI port so the local demo is reachable at a known address.
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        // Parallelism 1 keeps the printed output ordered within each input stream.
        env.setParallelism(1);

        DataStreamSource<Integer> intStream = env.fromElements(1, 2, 3, 4, 5);
        DataStreamSource<String> strStream = env.fromElements("a", "b", "c", "d", "e");

        // connect() pairs the streams without unifying their types; the
        // CoMapFunction below supplies one mapper per input side.
        ConnectedStreams<Integer, String> connected = intStream.connect(strStream);
        SingleOutputStreamOperator<String> result = connected.map(new CoMapFunction<Integer, String, String>() {
            @Override
            public String map1(Integer value) throws Exception {
                // Invoked for elements of the first (Integer) stream.
                return value + ">";
            }

            @Override
            public String map2(String value) throws Exception {
                // Invoked for elements of the second (String) stream.
                return value + "<";
            }
        });

        result.print();

        // Let job failures propagate rather than printing and continuing.
        env.execute();
    }
}

/*
Connect:
    Joins two streams into one; the two streams may have different element types.

    1. Exactly two streams can be connected at a time.
    2. The element types of the two streams may differ.

    NOTE(review): the commented-out class below is an earlier duplicate of
    ConnectDemo above; it is kept only for reference and should be removed
    (version control already preserves the history).
 */
//public class ConnectDemo {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//        DataStreamSource<Integer> stream = env.fromElements(1, 2, 3, 4, 5);
//        DataStreamSource<String> stream1 = env.fromElements("a", "b", "c", "d", "e");
//        ConnectedStreams<Integer, String> cs = stream.connect(stream1);
//        SingleOutputStreamOperator<String> s1 = cs.map(new CoMapFunction<Integer, String, String>() {
//            @Override
//            public String map1(Integer value) throws Exception {
//                return value + ">";
//            }
//
//            @Override
//            public String map2(String value) throws Exception {
//                return value + "<";
//            }
//        });
//        s1.print();
//
//        try {
//            env.execute();
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//    }
//}

/*
Connect:
    Joins two streams into one; the two streams may have different element types.

1. Exactly two streams can be connected at a time.
2. The element types of the two streams may differ.

    NOTE(review): another commented-out duplicate of ConnectDemo follows
    (only the sample Integer values differ); candidate for deletion.
 */
//public class ConnectDemo {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//        DataStreamSource<Integer> stream = env.fromElements(1, 2, 4, 5, 7);
//        DataStreamSource<String> stream1 = env.fromElements("a", "b", "c", "d", "e");
//        ConnectedStreams<Integer, String> cs = stream.connect(stream1);
//        SingleOutputStreamOperator<String> s1 = cs.map(new CoMapFunction<Integer, String, String>() {
//            @Override
//            public String map1(Integer value) throws Exception {
//                return value + ">";
//            }
//
//            @Override
//            public String map2(String s) throws Exception {
//                return s + "<";
//            }
//        });
//
//        s1.print();
//
//        try {
//            env.execute();
//        } catch (Exception e) {
//            e.printStackTrace();
//        }
//    }
//}