package cn.xuexiyuan.flinkstudy.transformation;

import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;

import java.util.Arrays;

/**
 * @Description: Demonstrates merging streams with union (same type) and connect (different types).
 * @Author 左龙龙
 * @Date 21-3-23
 * @Version 1.0
 **/
public class TransformationDemo02 {
    public static void main(String[] args) throws Exception {
        // 0. Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 1. Sources.
        // env.fromElements(varargs) — stream of literal values.
        DataStreamSource<String> flinkWords = env.fromElements("flink flink flink", "flink");
        // env.fromCollection(any collection).
        DataStreamSource<String> hadoopWords = env.fromCollection(Arrays.asList("hadoop hadoop hadoop", "hadoop"));
        // env.generateSequence(from, to) — longs 0 through 10.
        DataStreamSource<Long> numbers = env.generateSequence(0, 10);


        // 2. Transformations.
        // union can only merge streams of the SAME element type.
        DataStream<String> merged = flinkWords.union(hadoopWords);
        // connect pairs streams of DIFFERENT types; the result must be
        // processed (e.g. mapped) before it can be printed.
        ConnectedStreams<String, Long> connected = hadoopWords.connect(numbers);
        SingleOutputStreamOperator<String> mapped = connected.map(new CoMapFunction<String, Long, String>() {
            // Invoked for elements of the first (String) stream.
            @Override
            public String map1(String value) throws Exception {
                return "String: " + value;
            }

            // Invoked for elements of the second (Long) stream.
            @Override
            public String map2(Long value) throws Exception {
                return "long: " + value;
            }
        });

        // 3. Sinks.
        merged.print();
        // A ConnectedStreams has no print(); it must be mapped first.
        // connected.print();
        mapped.print();

        // 4. Execute the job.
        env.execute("TransformationDemo02");

    }
}
