package source;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.omg.SendingContext.RunTime;

/**
 * Author itcast
 * Desc Demonstrates two Flink stream-combination transformations:
 * {@code union} (merges streams of the SAME element type) and
 * {@code connect} (pairs streams of possibly DIFFERENT element types,
 * each side processed by its own map function).
 */
public class TransformationDemo02 {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);
        env.setParallelism(1);

        // 2. Sources: two String streams and one Long stream.
        DataStreamSource<String> ds1 = env.fromElements("hadoop", "spark", "flink");
        DataStreamSource<String> ds2 = env.fromElements("hadoop", "spark", "flink");
        DataStreamSource<Long> ds3 = env.fromElements(1L, 2L, 3L);

        // 3. Transformations.
        // union: requires both streams to have the same element type.
        DataStream<String> result1 = ds1.union(ds2);

        // connect: the two streams may have different element types; a
        // CoMapFunction<IN1, IN2, OUT> supplies one map per input side.
        ConnectedStreams<String, Long> tempResult = ds1.connect(ds3);
        // Fix: keep the mapped stream — it was previously discarded, so the
        // connect branch never produced any observable output.
        DataStream<String> result2 = tempResult.map(new CoMapFunction<String, Long, String>() {
            @Override
            public String map1(String value) throws Exception {
                // Applied to elements arriving from the first (String) stream.
                return "String->String" + value;
            }

            @Override
            public String map2(Long value) throws Exception {
                // Applied to elements arriving from the second (Long) stream.
                return "Long->String" + value.toString();
            }
        });

        // 4. Sinks: print both the union and the connect results.
        result1.print();
        result2.print();

        // 5. Trigger job execution (lazy until execute() is called).
        env.execute();
    }
}