import com.zuikaku.pojo.People;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class RichMapFlatMapDemo {

    /**
     * Flink streaming demo: parses in-memory "name,age,weight" CSV records into
     * {@link People} objects via a {@code RichFlatMapFunction}, demonstrating the
     * rich-function lifecycle hooks {@code open()} / {@code close()}.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute — propagated
     *                   instead of swallowed so the process exits non-zero on failure
     */
    public static void main(String[] args) throws Exception {
        // 1. Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //env.setParallelism(1);

        // 2. Build the source from in-memory elements.
        DataStream<String> source = env.fromElements("zhangsan,18,100", "lisi,20,120", "wangwu,15,60");
        source.print("源数据(source)");

        // 3. Transformation: parse each CSV line into a People object.
        //    (Note: this is still a DataStream, not a sink — .print() below is the sink.)
        DataStream<People> peopleStream = source.flatMap(new RichFlatMapFunction<String, People>() {
            @Override
            public void open(Configuration parameters) throws Exception {
                // Called once per parallel task instance before any records flow.
                super.open(parameters);
                System.out.println("open");
            }

            @Override
            public void close() throws Exception {
                // Called once per parallel task instance after the stream ends.
                super.close();
                System.out.println("close");
            }

            @Override
            public void flatMap(String s, Collector<People> collector) throws Exception {
                // Expect exactly "name,age,weight"; skip malformed records rather
                // than letting a parse failure kill the whole job.
                String[] fields = s.split(",");
                if (fields.length != 3) {
                    System.err.println("Skipping malformed record: " + s);
                    return;
                }
                try {
                    String name = fields[0];
                    int age = Integer.parseInt(fields[1].trim());
                    float weight = Float.parseFloat(fields[2].trim());
                    collector.collect(new People(name, age, weight));
                } catch (NumberFormatException e) {
                    System.err.println("Skipping record with non-numeric fields: " + s);
                }
            }
        });
        peopleStream.print("处理后(sink)");

        // 4. Submit the job. Operators run in parallel by default, so output
        //    order across subtasks is not guaranteed.
        env.execute("myJob");
    }
}
