package cn.azzhu.day02.transformations;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.Arrays;

/**
 * flatMap
 * @author azzhu
 * @create 2020-09-18 00:31:58
 */
/**
 * Demonstrates the {@code flatMap} transformation: each input sentence is
 * split on spaces and emitted as individual words, then printed to stdout.
 */
public class TransformationDemo2 {
    public static void main(String[] args) throws Exception {
        // Bootstrap the streaming execution environment.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Two sample sentences act as the bounded input stream.
        final DataStreamSource<String> lines = env.fromElements("spark flink hadoop","spark flink hbase");

        // Split each line into words and emit one element per word.
        // The explicit returns(Types.STRING) is required because Java type
        // erasure prevents Flink from inferring the lambda's output type.
        final SingleOutputStreamOperator<String> words = lines
                .flatMap((String line, Collector<String> out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(word);
                    }
                })
                .returns(Types.STRING);

        // Note: flatMap can alternatively take a RichFlatMapFunction
        // implementation when access to runtime context / lifecycle is needed.
        words.print();
        env.execute("TransformationDemo2");
    }
}
