package com.flink.demo.source02;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Demonstrates basic Flink DataStream transformations on a user-defined source:
 * builds a pipeline that filters the source's {@code Item} records down to those
 * with an even id, prints them, and executes the job.
 *
 * @author zsyoung@qq.com
 * 2020/7/8 0:37
 */
public class StreamingDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Acquire the custom data source; parallelism 1 keeps a single source
        // instance so the emitted stream order is deterministic.
        DataStreamSource<MyStreamingSource.Item> text = env.addSource(new MyStreamingSource()).setParallelism(1);

        // Other transformations this demo previously illustrated (kept here as notes):
        //   map:     text.map(value -> value)                         — pass each item through unchanged
        //   map:     text.map(MyStreamingSource.Item::getName)        — project each item to its name
        //   flatMap: text.flatMap((it, out) -> out.collect(it.getName()))

        // Keep only items whose id is even. No FilterFunction cast/type hint is
        // needed: filter() returns the same element type, so nothing is lost to
        // generic type erasure.
        SingleOutputStreamOperator<MyStreamingSource.Item> item =
                text.filter(value -> value.getId() % 2 == 0);

        item.print().setParallelism(1);
        env.execute("user defined streaming job");
    }
}
