package com.zhang.hadoop.flink.test3;

import com.zhang.hadoop.flink.base.Event;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;
import java.util.List;

/**
 * @author: zhang yufei
 * @createTime:2022/5/15 10:12
 * @description:
 */
public class TransformMapTest {

    /**
     * Builds a bounded stream of {@code Event}s and extracts the {@code user}
     * field with three equivalent map implementations, then prints each result.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallel task so printed output order is deterministic.
        env.setParallelism(1);

        List<Event> events = new ArrayList<>();
        events.add(new Event("yanghui", "./yindao", 1000L));
        events.add(new Event("yuping", "./siwajiao", 2000L));
        events.add(new Event("yangdan", "./gangmen", 2000L));
        events.add(new Event("jingru", "./niaodao", 2000L));
        DataStreamSource<Event> stream = env.fromCollection(events);

        // Transformation: extract the user field from each Event.

        // 1. Custom class implementing the MapFunction interface.
        SingleOutputStreamOperator<String> result1 = stream.map(new Mapper());

        // 2. Anonymous class implementing the MapFunction interface.
        //    Typed <Event, String> (not Object) to keep the output type-safe
        //    and consistent with the other two variants.
        SingleOutputStreamOperator<String> result2 = stream.map(new MapFunction<Event, String>() {
            @Override
            public String map(Event event) throws Exception {
                return event.user;
            }
        });

        // 3. Lambda expression. The return type (String) is non-generic, so
        //    Flink's type extraction handles it without extra hints.
        SingleOutputStreamOperator<String> result3 = stream.map(data -> data.user);

        result1.print();
        result2.print();
        result3.print();
        env.execute();
    }

    /**
     * Custom {@link MapFunction} that maps an {@link Event} to its user name.
     */
    public static class Mapper implements MapFunction<Event, String> {

        @Override
        public String map(Event event) throws Exception {
            return event.user;
        }
    }
}
